ultralytics 8.0.188 fix .grad attribute leaf Tensor Warning (#5094)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Glenn Jocher 2023-09-26 20:28:45 +02:00 committed by GitHub
parent f2ed207571
commit 19c3314e68
11 changed files with 78 additions and 41 deletions

@@ -327,8 +327,9 @@ def yaml_save(file='data.yaml', data=None, header=''):
         file.parent.mkdir(parents=True, exist_ok=True)

     # Convert Path objects to strings
+    valid_types = int, float, str, bool, list, tuple, dict, type(None)
     for k, v in data.items():
-        if isinstance(v, Path):
+        if not isinstance(v, valid_types):
             data[k] = str(v)

     # Dump data to file in YAML format
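
The broader valid_types check matters because yaml.safe_dump rejects arbitrary Python objects, not only pathlib.Path. A minimal, self-contained sketch of the patched behaviour (save_yaml_sketch is a hypothetical stand-in for the library's yaml_save, and the sample data is illustrative):

from pathlib import Path
import yaml

def save_yaml_sketch(file='data.yaml', data=None):
    """Sketch: stringify any value outside the basic YAML-serializable types before dumping."""
    data = data or {}
    valid_types = int, float, str, bool, list, tuple, dict, type(None)
    for k, v in data.items():
        if not isinstance(v, valid_types):  # the old code only converted Path instances
            data[k] = str(v)
    with open(file, 'w', encoding='utf-8') as f:
        yaml.safe_dump(data, f, sort_keys=False)

save_yaml_sketch('data.yaml', {'path': Path('/tmp/datasets'), 'epochs': 100})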

@@ -55,7 +55,7 @@ def parse_requirements(file_path=ROOT.parent / 'requirements.txt', package=''):
         line = line.strip()
         if line and not line.startswith('#'):
             line = line.split('#')[0].strip()  # ignore inline comments
-            match = re.match(r'([a-zA-Z0-9-_]+)([<>!=~]+.*)?', line)
+            match = re.match(r'([a-zA-Z0-9-_]+)\s*([<>!=~]+.*)?', line)
             if match:
                 requirements.append(SimpleNamespace(name=match[1], specifier=match[2].strip() if match[2] else ''))
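
The added \s* lets a requirement keep its version specifier even when a space separates the package name from the pin. A quick check (the 'numpy >= 1.22.2' line is an illustrative example, not taken from the repository's requirements.txt):

import re
from types import SimpleNamespace

line = 'numpy >= 1.22.2'  # hypothetical requirement with a space before the specifier
old = re.match(r'([a-zA-Z0-9-_]+)([<>!=~]+.*)?', line)
new = re.match(r'([a-zA-Z0-9-_]+)\s*([<>!=~]+.*)?', line)
print(old[2])  # None -> the old pattern silently dropped the specifier
print(new[2])  # '>= 1.22.2' -> whitespace is skipped before capturing
print(SimpleNamespace(name=new[1], specifier=new[2].strip() if new[2] else ''))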

@@ -44,7 +44,10 @@ def smart_inference_mode():
     def decorate(fn):
         """Applies appropriate torch decorator for inference mode based on torch version."""
-        return (torch.inference_mode if TORCH_1_9 else torch.no_grad)()(fn)
+        if TORCH_1_9 and torch.is_inference_mode_enabled():
+            return fn  # already in inference_mode, act as a pass-through
+        else:
+            return (torch.inference_mode if TORCH_1_9 else torch.no_grad)()(fn)

     return decorate
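
With this change the decorator acts as a pass-through when it is applied while inference mode is already active, instead of nesting a second torch.inference_mode() context (which, per the commit title, triggered the .grad non-leaf Tensor warning). A self-contained sketch of the pattern, assuming a torch build new enough to provide torch.is_inference_mode_enabled(); smart_inference_mode_sketch, infer and postprocess are hypothetical names, not the ultralytics source:

import torch

TORCH_1_9 = True  # assumption for this sketch

def smart_inference_mode_sketch():
    """Sketch of the patched decorator logic."""
    def decorate(fn):
        # Evaluated when the decorator is applied: if that happens inside an
        # active inference_mode context, return the function untouched.
        if TORCH_1_9 and torch.is_inference_mode_enabled():
            return fn
        return (torch.inference_mode if TORCH_1_9 else torch.no_grad)()(fn)
    return decorate

def postprocess(x):
    return x + 1

@smart_inference_mode_sketch()
def infer(x):
    # Already inside inference_mode here, so this second application is a no-op
    # pass-through rather than a nested inference_mode wrapper.
    return smart_inference_mode_sketch()(postprocess)(x)

print(infer(torch.ones(2)))  # tensor([2., 2.])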