PyCharm Code Inspect fixes (#18392)
Signed-off-by: UltralyticsAssistant <web@ultralytics.com>
Signed-off-by: Glenn Jocher <glenn.jocher@ultralytics.com>
Co-authored-by: UltralyticsAssistant <web@ultralytics.com>
parent d35860d4a1
commit e5e91967d9

31 changed files with 72 additions and 72 deletions
````diff
@@ -134,14 +134,18 @@ Here's an example of how to freeze BatchNorm statistics when freezing layers with
 ```python
 from ultralytics import YOLO


 # Add a callback to put the frozen layers in eval mode to prevent BN values from changing
 def put_in_eval_mode(trainer):
-    n_layers = trainer.args.freeze
-    if not isinstance(n_layers, int): return
-    for i, (name, module) in enumerate(trainer.model.named_modules()):
-        if name.endswith("bn") and int(name.split('.')[1]) < n_layers:
-            module.eval()
-            module.track_running_stats = False
+    n_layers = trainer.args.freeze
+    if not isinstance(n_layers, int):
+        return
+
+    for i, (name, module) in enumerate(trainer.model.named_modules()):
+        if name.endswith("bn") and int(name.split(".")[1]) < n_layers:
+            module.eval()
+            module.track_running_stats = False


 model = YOLO("yolo11n.pt")
 model.add_callback("on_train_epoch_start", put_in_eval_mode)
````
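For context, here is a minimal runnable sketch of the docs example as it reads after this change. The final `model.train()` call (dataset, epoch count, and freeze depth) is illustrative only and is not part of this commit:

```python
from ultralytics import YOLO


# Callback: put frozen layers in eval mode so their BatchNorm running stats stay fixed
def put_in_eval_mode(trainer):
    n_layers = trainer.args.freeze
    if not isinstance(n_layers, int):
        return

    for i, (name, module) in enumerate(trainer.model.named_modules()):
        # Module names look like "model.3.bn"; index 1 is the layer number
        if name.endswith("bn") and int(name.split(".")[1]) < n_layers:
            module.eval()
            module.track_running_stats = False


model = YOLO("yolo11n.pt")
model.add_callback("on_train_epoch_start", put_in_eval_mode)

# Illustrative training call (assumed values): freeze the first 10 layers
model.train(data="coco8.yaml", epochs=10, freeze=10)
```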