Use trainer.amp to determine FP16 validation (#16333)

Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
Authored by Laughing on 2024-09-18 16:09:56 +08:00; committed by GitHub
parent 80c286736b
commit 225e6e2b25

@@ -111,7 +111,7 @@ class BaseValidator:
             self.device = trainer.device
             self.data = trainer.data
             # force FP16 val during training
-            self.args.half = self.device.type != "cpu" and self.args.amp
+            self.args.half = self.device.type != "cpu" and trainer.amp
             model = trainer.ema.ema or trainer.model
             model = model.half() if self.args.half else model.float()
             # self.model = model
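
For context, a minimal sketch of the decision the changed line encodes: validation precision now follows the trainer's resolved AMP state rather than the raw `args.amp` request. The `trainer`/`args` objects below are hypothetical stand-ins built with SimpleNamespace, not the Ultralytics classes.

# Sketch only: illustrates why trainer.amp is the source of truth.
# args.amp is the user's requested setting; trainer.amp reflects whether
# AMP actually ended up enabled for this training run.
from types import SimpleNamespace

import torch


def resolve_val_half(device: torch.device, trainer_amp: bool) -> bool:
    # FP16 validation is forced during training only when the device is
    # not CPU and the trainer is really running with AMP enabled.
    return device.type != "cpu" and trainer_amp


if __name__ == "__main__":
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    args = SimpleNamespace(amp=True, half=False)          # AMP was requested...
    trainer = SimpleNamespace(device=device, amp=False)   # ...but the trainer disabled it

    args.half = resolve_val_half(trainer.device, trainer.amp)
    print(f"half-precision validation: {args.half}")      # False despite args.amp=True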