Feature: Create HUB Models from CLI or Python Script (#7316)

Co-authored-by: Hassaan Farooq <103611273+hassaanfarooq01@users.noreply.github.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
Kalen Michael authored on 2024-01-10 02:36:14 +01:00; committed by GitHub
parent a92adf8231
commit b54055a2c7
7 changed files with 356 additions and 154 deletions
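
The hunks below come from the HUB trainer-callbacks module, one of the seven changed files, so the new user-facing entry points for creating a HUB model are not visible in this excerpt. For orientation only, a typical Ultralytics HUB training script looks roughly like the following sketch; the API key and model URL are placeholders, and the exact API surface added by this PR may differ.

from ultralytics import YOLO, hub

hub.login("YOUR_API_KEY")  # placeholder key; links this machine to a HUB account
model = YOLO("https://hub.ultralytics.com/models/MODEL_ID")  # placeholder HUB model URL
results = model.train()  # progress is pushed to HUB by the callbacks patched below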


@@ -3,7 +3,9 @@
 import json
 from time import time

-from ultralytics.hub.utils import HUB_WEB_ROOT, PREFIX, events
+from hub_sdk.config import HUB_WEB_ROOT
+
+from ultralytics.hub.utils import PREFIX, events
 from ultralytics.utils import LOGGER, SETTINGS
@@ -12,8 +14,9 @@ def on_pretrain_routine_end(trainer):
     session = getattr(trainer, 'hub_session', None)
     if session:
         # Start timer for upload rate limit
-        LOGGER.info(f'{PREFIX}View model at {HUB_WEB_ROOT}/models/{session.model_id} 🚀')
-        session.timers = {'metrics': time(), 'ckpt': time()}  # start timer on session.rate_limit
+        session.timers = {
+            'metrics': time(),
+            'ckpt': time(), }  # start timer on session.rate_limit


 def on_fit_epoch_end(trainer):
@@ -21,10 +24,13 @@ def on_fit_epoch_end(trainer):
     session = getattr(trainer, 'hub_session', None)
     if session:
         # Upload metrics after val end
-        all_plots = {**trainer.label_loss_items(trainer.tloss, prefix='train'), **trainer.metrics}
+        all_plots = {
+            **trainer.label_loss_items(trainer.tloss, prefix='train'),
+            **trainer.metrics, }
         if trainer.epoch == 0:
             from ultralytics.utils.torch_utils import model_info_for_loggers
             all_plots = {**all_plots, **model_info_for_loggers(trainer)}
         session.metrics_queue[trainer.epoch] = json.dumps(all_plots)
         if time() - session.timers['metrics'] > session.rate_limits['metrics']:
             session.upload_metrics()
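
For context, on_fit_epoch_end merges the prefixed training losses with the validation metrics into one flat dict and queues it as a JSON string per epoch. A minimal illustration with made-up values (not the HUBTrainingSession implementation):

import json

train_losses = {"train/box_loss": 0.52, "train/cls_loss": 0.71}  # shape of label_loss_items(..., prefix='train')
val_metrics = {"metrics/mAP50-95(B)": 0.31}                       # shape of trainer.metrics
payload = json.dumps({**train_losses, **val_metrics})             # what lands in metrics_queue[epoch]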
@@ -39,7 +45,7 @@ def on_model_save(trainer):
         # Upload checkpoints with rate limiting
         is_best = trainer.best_fitness == trainer.fitness
         if time() - session.timers['ckpt'] > session.rate_limits['ckpt']:
-            LOGGER.info(f'{PREFIX}Uploading checkpoint {HUB_WEB_ROOT}/models/{session.model_id}')
+            LOGGER.info(f'{PREFIX}Uploading checkpoint {HUB_WEB_ROOT}/models/{session.model_file}')
             session.upload_model(trainer.epoch, trainer.last, is_best)
             session.timers['ckpt'] = time()  # reset timer
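
Both the metrics and checkpoint uploads above use the same throttle: remember when the last upload happened, skip until the per-kind window has elapsed, then reset the timer. A minimal sketch of that pattern with plain dicts (the window lengths are placeholders, not HUB's actual limits):

from time import time

timers = {"metrics": time(), "ckpt": time()}   # time of the last upload per kind
rate_limits = {"metrics": 3.0, "ckpt": 900.0}  # placeholder windows in seconds

def maybe_upload(kind, upload_fn):
    """Run upload_fn only when the rate-limit window for this kind has elapsed."""
    if time() - timers[kind] > rate_limits[kind]:
        upload_fn()
        timers[kind] = time()  # reset the timer, as the callbacks above do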
@@ -50,10 +56,15 @@ def on_train_end(trainer):
     if session:
         # Upload final model and metrics with exponential standoff
         LOGGER.info(f'{PREFIX}Syncing final model...')
-        session.upload_model(trainer.epoch, trainer.best, map=trainer.metrics.get('metrics/mAP50-95(B)', 0), final=True)
+        session.upload_model(
+            trainer.epoch,
+            trainer.best,
+            map=trainer.metrics.get('metrics/mAP50-95(B)', 0),
+            final=True,
+        )
         session.alive = False  # stop heartbeats
         LOGGER.info(f'{PREFIX}Done ✅\n'
-                    f'{PREFIX}View model at {HUB_WEB_ROOT}/models/{session.model_id} 🚀')
+                    f'{PREFIX}View model at {session.model_url} 🚀')


 def on_train_start(trainer):
@@ -76,7 +87,7 @@ def on_export_start(exporter):
     events(exporter.args)


-callbacks = {
+callbacks = ({
     'on_pretrain_routine_end': on_pretrain_routine_end,
     'on_fit_epoch_end': on_fit_epoch_end,
     'on_model_save': on_model_save,
@@ -84,4 +95,4 @@ callbacks = {
     'on_train_start': on_train_start,
     'on_val_start': on_val_start,
     'on_predict_start': on_predict_start,
-    'on_export_start': on_export_start} if SETTINGS['hub'] is True else {}  # verify enabled
+    'on_export_start': on_export_start, } if SETTINGS['hub'] is True else {})  # verify enabled
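
The callbacks dict above is only populated when SETTINGS['hub'] is True, so the whole HUB integration is a no-op unless HUB is enabled in the Ultralytics settings. User code can hook the same trainer events through Model.add_callback; a small sketch in which the callback body is purely illustrative:

from ultralytics import YOLO

def notify_on_train_end(trainer):
    # Illustrative user callback fired at the same event as the HUB on_train_end above
    print(f"Finished after {trainer.epoch + 1} epochs")

model = YOLO("yolov8n.pt")
model.add_callback("on_train_end", notify_on_train_end)
model.train(data="coco8.yaml", epochs=1)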