Favor EMA over model in train checkpoints (#9433)

Signed-off-by: Glenn Jocher <glenn.jocher@ultralytics.com>

parent 479afce4a8
commit 7ea2007326

1 changed file with 2 additions and 3 deletions
@@ -42,7 +42,6 @@ from ultralytics.utils.files import get_latest_run
 from ultralytics.utils.torch_utils import (
     EarlyStopping,
     ModelEMA,
-    de_parallel,
     init_seeds,
     one_cycle,
     select_device,
@@ -486,7 +485,7 @@ class BaseTrainer:
             {
                 "epoch": self.epoch,
                 "best_fitness": self.best_fitness,
-                "model": deepcopy(de_parallel(self.model)).half(),
+                "model": None,  # resume and final checkpoints derive from EMA
                 "ema": deepcopy(self.ema.ema).half(),
                 "updates": self.ema.updates,
                 "optimizer": self.optimizer.state_dict(),
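The checkpoint now stores the full network only once, under the "ema" key; "model" is kept for key compatibility but saved as None, which is also why the de_parallel import is dropped in the first hunk (the removed line was evidently its last use in this file). Below is a minimal sketch of how a resume path can consume such a checkpoint, assuming nothing beyond the keys shown in the hunk above; the helper name and signature are illustrative only, not the Ultralytics API.

from copy import deepcopy

import torch


def load_for_resume(path: str, device: str = "cpu"):
    """Illustrative only: rebuild a trainable model from a checkpoint whose 'model' entry may be None."""
    ckpt = torch.load(path, map_location=device)
    # Favor EMA: after this commit the EMA copy is the only complete nn.Module in the file.
    module = ckpt.get("ema") or ckpt.get("model")
    if module is None:
        raise ValueError(f"{path} holds neither an 'ema' nor a 'model' entry")
    model = deepcopy(module).float()  # weights were saved with .half(); restore fp32 for training
    start_epoch = ckpt["epoch"] + 1
    return model, start_epoch, ckpt["optimizer"]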
@@ -527,7 +526,7 @@ class BaseTrainer:
         ckpt = None
         if str(model).endswith(".pt"):
             weights, ckpt = attempt_load_one_weight(model)
-            cfg = ckpt["model"].yaml
+            cfg = weights.yaml
         else:
             cfg = model
         self.model = self.get_model(cfg=cfg, weights=weights, verbose=RANK == -1)  # calls Model(cfg, weights)
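The second hunk is the matching read-side change in setup_model. Because new checkpoints carry "model": None, the old cfg = ckpt["model"].yaml lookup would break, while weights, the module returned by attempt_load_one_weight (which, to my understanding, is built from the "ema" entry when one is present), still carries the same .yaml model config. A small sketch of the failure mode the new line avoids; the dict below is a hand-made stand-in, not a real checkpoint.

# Stand-in checkpoint shaped like one written by the updated save_model above.
ckpt = {"epoch": 99, "best_fitness": 0.62, "model": None}

try:
    cfg = ckpt["model"].yaml  # old lookup: breaks once 'model' is saved as None
except AttributeError as err:
    print(f"old lookup fails: {err}")

# New lookup: cfg = weights.yaml, where `weights` is the nn.Module handed back by
# attempt_load_one_weight, so the .yaml config is available regardless of ckpt["model"].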