ultralytics 8.2.16 DDP pretrained argument fix (#11787)

Co-authored-by: UltralyticsAssistant <web@ultralytics.com>
Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
Co-authored-by: Laughing-q <1185102784@qq.com>
Co-authored-by: Laughing <61612323+Laughing-q@users.noreply.github.com>
This commit is contained in:
Hoonjae Lee 2024-05-16 02:39:26 +09:00 committed by GitHub
parent 590002ff6d
commit b87ea6ab22
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
3 changed files with 6 additions and 9 deletions

View file

@@ -527,13 +527,13 @@ class BaseTrainer:
if isinstance(self.model, torch.nn.Module): # if model is loaded beforehand. No setup needed
return
model, weights = self.model, None
cfg, weights = self.model, None
ckpt = None
if str(model).endswith(".pt"):
weights, ckpt = attempt_load_one_weight(model)
if str(self.model).endswith(".pt"):
weights, ckpt = attempt_load_one_weight(self.model)
cfg = weights.yaml
else:
cfg = model
elif isinstance(self.args.pretrained, (str, Path)):
weights, _ = attempt_load_one_weight(self.args.pretrained)
self.model = self.get_model(cfg=cfg, weights=weights, verbose=RANK == -1) # calls Model(cfg, weights)
return ckpt