Ruff Docstring formatting (#15793)
Signed-off-by: UltralyticsAssistant <web@ultralytics.com>
Co-authored-by: UltralyticsAssistant <web@ultralytics.com>
parent d27664216b
commit 776ca86369
60 changed files with 241 additions and 309 deletions
@@ -228,7 +228,6 @@ class BaseTrainer:
 
     def _setup_train(self, world_size):
         """Builds dataloaders and optimizer on correct rank process."""
-
         # Model
         self.run_callbacks("on_pretrain_routine_start")
         ckpt = self.setup_model()
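Note: the two 7-line to 6-line hunks in this file drop the blank line that previously sat between a docstring and the first statement of the method body, while the 7-line to 7-line hunk below adds the missing terminal period to a docstring summary. A minimal sketch of the resulting style follows; mapping these fixes to specific pydocstyle rule codes (e.g. D202 for blank lines after a docstring, D415 for the terminal punctuation) is my reading and is not stated in the commit.

```python
# Illustrative only: the docstring style this commit normalizes.
def register_example(name):
    """Registers an example entry (e.g. to be consumed later)."""  # summary ends with a period
    return name  # body starts directly after the docstring, with no blank line in between
```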
@@ -638,7 +637,7 @@ class BaseTrainer:
         pass
 
     def on_plot(self, name, data=None):
-        """Registers plots (e.g. to be consumed in callbacks)"""
+        """Registers plots (e.g. to be consumed in callbacks)."""
         path = Path(name)
         self.plots[path] = {"data": data, "timestamp": time.time()}
 
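For context, `on_plot` only records an entry in `self.plots`; something else reads that registry later. The sketch below shows one way a callback could consume it. The `TinyTrainer` class and `log_plots` callback are illustrative stand-ins, not the actual Ultralytics callback API.

```python
# Minimal sketch, assuming a plot registry shaped like the one on_plot() populates.
import time
from pathlib import Path


class TinyTrainer:
    def __init__(self):
        self.plots = {}

    def on_plot(self, name, data=None):
        """Registers plots (e.g. to be consumed in callbacks)."""
        path = Path(name)
        self.plots[path] = {"data": data, "timestamp": time.time()}


def log_plots(trainer):  # hypothetical callback that reads the registry
    for path, entry in trainer.plots.items():
        print(f"{path} registered at {entry['timestamp']:.0f} with data={entry['data']}")


trainer = TinyTrainer()
trainer.on_plot("results.png", data={"epoch": 1})
log_plots(trainer)
```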
@@ -737,7 +736,6 @@ class BaseTrainer:
         Returns:
             (torch.optim.Optimizer): The constructed optimizer.
         """
-
         g = [], [], []  # optimizer parameter groups
         bn = tuple(v for k, v in nn.__dict__.items() if "Norm" in k)  # normalization layers, i.e. BatchNorm2d()
         if name == "auto":
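The context lines in this last hunk hint at the usual three-way parameter split behind `g = [], [], []`: weights that receive weight decay, normalization-layer weights without decay, and biases without decay. The sketch below is a minimal version of that pattern under those assumptions; the exact grouping and decay policy is not shown in the diff.

```python
# Minimal sketch of a three-group optimizer build (assumed policy, not read from the diff).
import torch
from torch import nn

bn = tuple(v for k, v in nn.__dict__.items() if "Norm" in k)  # normalization layers, e.g. BatchNorm2d


def build_optimizer_sketch(model, lr=0.01, momentum=0.9, decay=1e-5):
    g = [], [], []  # optimizer parameter groups: [decayed weights, norm weights, biases]
    for module in model.modules():
        for name, param in module.named_parameters(recurse=False):
            if name == "bias":
                g[2].append(param)  # biases: no decay
            elif isinstance(module, bn):
                g[1].append(param)  # norm-layer weights: no decay
            else:
                g[0].append(param)  # other weights: decay
    optimizer = torch.optim.SGD(g[2], lr=lr, momentum=momentum, nesterov=True)
    optimizer.add_param_group({"params": g[0], "weight_decay": decay})
    optimizer.add_param_group({"params": g[1], "weight_decay": 0.0})
    return optimizer


# Example usage:
# opt = build_optimizer_sketch(nn.Sequential(nn.Conv2d(3, 8, 3), nn.BatchNorm2d(8)))
```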