PyCharm Code Inspect fixes (#18392)

Signed-off-by: UltralyticsAssistant <web@ultralytics.com>
Signed-off-by: Glenn Jocher <glenn.jocher@ultralytics.com>
Co-authored-by: UltralyticsAssistant <web@ultralytics.com>
Glenn Jocher 2024-12-25 16:24:29 +01:00 committed by GitHub
parent d35860d4a1
commit e5e91967d9
GPG key ID: B5690EEEBB952194
31 changed files with 72 additions and 72 deletions

@@ -134,14 +134,18 @@ Here's an example of how to freeze BatchNorm statistics when freezing layers wit
```python
from ultralytics import YOLO
# Add a callback to put the frozen layers in eval mode to prevent BN values from changing
def put_in_eval_mode(trainer):
-  n_layers = trainer.args.freeze
-  if not isinstance(n_layers, int): return
-  for i, (name, module) in enumerate(trainer.model.named_modules()):
-    if name.endswith("bn") and int(name.split('.')[1]) < n_layers:
-      module.eval()
-      module.track_running_stats = False
+    n_layers = trainer.args.freeze
+    if not isinstance(n_layers, int):
+        return
+    for i, (name, module) in enumerate(trainer.model.named_modules()):
+        if name.endswith("bn") and int(name.split(".")[1]) < n_layers:
+            module.eval()
+            module.track_running_stats = False
model = YOLO("yolo11n.pt")
model.add_callback("on_train_epoch_start", put_in_eval_mode)
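# Illustrative usage sketch (an assumption, not part of the original change): with the
# callback registered, training with an integer `freeze` value freezes the first N layers,
# and the callback keeps their BatchNorm running statistics fixed at the start of each epoch.
# The dataset and epoch count below are placeholder values.
model.train(data="coco8.yaml", epochs=10, freeze=10)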