ultralytics 8.0.167 Tuner updates and HUB Pose and Classify fixes (#4656)
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
parent 8596ee241f
commit d2cf7acce0
21 changed files with 174 additions and 144 deletions
@@ -325,6 +325,7 @@ def yaml_load(file='data.yaml', append_filename=False):
     Returns:
         (dict): YAML data and file name.
     """
+    assert Path(file).suffix in ('.yaml', '.yml'), f'Attempting to load non-YAML file {file} with yaml_load()'
     with open(file, errors='ignore', encoding='utf-8') as f:
         s = f.read()  # string
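The single added line above makes `yaml_load` fail fast when handed a file that is not YAML, instead of passing arbitrary text to the YAML parser. A minimal, self-contained sketch of the same guard (`safe_yaml_load` and the direct PyYAML call are illustrative, not the ultralytics implementation):

```python
from pathlib import Path

import yaml  # PyYAML


def safe_yaml_load(file='data.yaml'):
    """Illustrative stand-in for yaml_load with the new suffix guard."""
    # Fail fast on anything that is not a .yaml/.yml file
    assert Path(file).suffix in ('.yaml', '.yml'), f'Attempting to load non-YAML file {file}'
    with open(file, errors='ignore', encoding='utf-8') as f:
        return yaml.safe_load(f.read())
```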
@@ -9,14 +9,14 @@ from ultralytics.utils import LOGGER, SETTINGS, TESTS_RUNNING
 from ultralytics.utils.torch_utils import model_info_for_loggers

 try:
+    assert not TESTS_RUNNING  # do not log pytest
+    assert SETTINGS['clearml'] is True  # verify integration is enabled
     import clearml
     from clearml import Task
     from clearml.binding.frameworks.pytorch_bind import PatchPyTorchModelIO
     from clearml.binding.matplotlib_bind import PatchedMatplotlib

     assert hasattr(clearml, '__version__')  # verify package is not directory
-    assert not TESTS_RUNNING  # do not log pytest
-    assert SETTINGS['clearml'] is True  # verify integration is enabled
 except (ImportError, AssertionError):
     clearml = None
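The same reordering is applied to the other logger callbacks in this commit (Comet, DVCLive, MLflow, Neptune, Ray Tune, TensorBoard and W&B below): the enable-flag check and, where present, the `TESTS_RUNNING` check now run before the optional package is imported, so a disabled integration never pays the import cost. A self-contained sketch of the shared guard pattern; `some_integration` and the inline `TESTS_RUNNING`/`SETTINGS` stand-ins are placeholders, not ultralytics names:

```python
# Stand-ins for the values the real callbacks import from ultralytics.utils
TESTS_RUNNING = False                   # True when running under pytest
SETTINGS = {'some_integration': True}   # one enable flag per integration

try:
    assert not TESTS_RUNNING                     # do not log pytest runs
    assert SETTINGS['some_integration'] is True  # integration must be enabled first...
    import some_integration                      # ...so the import only happens when needed

    assert hasattr(some_integration, '__version__')  # verify package, not a same-named directory
except (ImportError, AssertionError):
    some_integration = None  # downstream callbacks check `if some_integration:` before logging
```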
@@ -7,11 +7,11 @@ from ultralytics.utils import LOGGER, RANK, SETTINGS, TESTS_RUNNING, ops
 from ultralytics.utils.torch_utils import model_info_for_loggers

 try:
+    assert not TESTS_RUNNING  # do not log pytest
+    assert SETTINGS['comet'] is True  # verify integration is enabled
     import comet_ml

-    assert not TESTS_RUNNING  # do not log pytest
     assert hasattr(comet_ml, '__version__')  # verify package is not directory
-    assert SETTINGS['comet'] is True  # verify integration is enabled
 except (ImportError, AssertionError):
     comet_ml = None
@@ -10,13 +10,12 @@ from ultralytics.utils import LOGGER, SETTINGS, TESTS_RUNNING
 from ultralytics.utils.torch_utils import model_info_for_loggers

 try:
+    assert not TESTS_RUNNING  # do not log pytest
+    assert SETTINGS['dvc'] is True  # verify integration is enabled
     from importlib.metadata import version

     import dvclive

-    assert not TESTS_RUNNING  # do not log pytest
-    assert SETTINGS['dvc'] is True  # verify integration is enabled
-
     ver = version('dvclive')
     if pkg.parse_version(ver) < pkg.parse_version('2.11.0'):
         LOGGER.debug(f'DVCLive is detected but version {ver} is incompatible (>=2.11 required).')
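The existing DVCLive version gate is kept unchanged. For reference, a standalone sketch of that check, assuming `pkg` is bound to `pkg_resources` as the `pkg.parse_version` call suggests:

```python
# Standalone sketch of the DVCLive version gate shown above.
# Assumes `pkg` is pkg_resources (suggested by the pkg.parse_version call); in the
# real callback this runs inside the try/except guard, so a missing dvclive is caught.
from importlib.metadata import version

import pkg_resources as pkg

ver = version('dvclive')  # installed dvclive version string, e.g. '2.11.0'
if pkg.parse_version(ver) < pkg.parse_version('2.11.0'):
    print(f'DVCLive is detected but version {ver} is incompatible (>=2.11 required).')
```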
@@ -7,11 +7,11 @@ from pathlib import Path
 from ultralytics.utils import LOGGER, SETTINGS, TESTS_RUNNING, colorstr

 try:
+    assert not TESTS_RUNNING  # do not log pytest
+    assert SETTINGS['mlflow'] is True  # verify integration is enabled
     import mlflow

-    assert not TESTS_RUNNING  # do not log pytest
     assert hasattr(mlflow, '__version__')  # verify package is not directory
-    assert SETTINGS['mlflow'] is True  # verify integration is enabled
 except (ImportError, AssertionError):
     mlflow = None
@@ -7,12 +7,12 @@ from ultralytics.utils import LOGGER, SETTINGS, TESTS_RUNNING
 from ultralytics.utils.torch_utils import model_info_for_loggers

 try:
+    assert not TESTS_RUNNING  # do not log pytest
+    assert SETTINGS['neptune'] is True  # verify integration is enabled
     import neptune
     from neptune.types import File

-    assert not TESTS_RUNNING  # do not log pytest
     assert hasattr(neptune, '__version__')
-    assert SETTINGS['neptune'] is True  # verify integration is enabled
 except (ImportError, AssertionError):
     neptune = None
@@ -3,11 +3,10 @@
 from ultralytics.utils import SETTINGS

 try:
+    assert SETTINGS['raytune'] is True  # verify integration is enabled
     import ray
     from ray import tune
     from ray.air import session
-
-    assert SETTINGS['raytune'] is True  # verify integration is enabled
 except (ImportError, AssertionError):
     tune = None
@@ -3,11 +3,9 @@
 from ultralytics.utils import LOGGER, SETTINGS, TESTS_RUNNING, colorstr

 try:
-    from torch.utils.tensorboard import SummaryWriter
-
     assert not TESTS_RUNNING  # do not log pytest
     assert SETTINGS['tensorboard'] is True  # verify integration is enabled
-
+    from torch.utils.tensorboard import SummaryWriter
     # TypeError for handling 'Descriptors cannot not be created directly.' protobuf errors in Windows
 except (ImportError, AssertionError, TypeError):
     SummaryWriter = None
@@ -4,11 +4,11 @@ from ultralytics.utils import SETTINGS, TESTS_RUNNING
 from ultralytics.utils.torch_utils import model_info_for_loggers

 try:
+    assert not TESTS_RUNNING  # do not log pytest
+    assert SETTINGS['wandb'] is True  # verify integration is enabled
     import wandb as wb

     assert hasattr(wb, '__version__')
-    assert not TESTS_RUNNING  # do not log pytest
-    assert SETTINGS['wandb'] is True  # verify integration is enabled
 except (ImportError, AssertionError):
     wb = None
@@ -160,7 +160,7 @@ def unzip_file(file, path=None, exclude=('.DS_Store', '__MACOSX'), exist_ok=False):
     # Check if destination directory already exists and contains files
     if path.exists() and any(path.iterdir()) and not exist_ok:
         # If it exists and is not empty, return the path without unzipping
-        LOGGER.info(f'Skipping {file} unzip (already unzipped)')
+        LOGGER.warning(f'WARNING ⚠️ Skipping {file} unzip as destination directory {path} is not empty.')
         return path

     for f in tqdm(files,
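The skip message is promoted from an info line to an explicit warning that names the non-empty destination directory. A simplified sketch of the surrounding skip logic (`unzip_if_empty` is a hypothetical helper; the real `unzip_file` also handles `exclude` filtering and path selection):

```python
from pathlib import Path
from zipfile import ZipFile


def unzip_if_empty(file, path, exist_ok=False):
    """Hypothetical helper illustrating the skip-if-not-empty behaviour above."""
    path = Path(path)
    # Destination already has files and overwriting was not requested: warn and skip
    if path.exists() and any(path.iterdir()) and not exist_ok:
        print(f'WARNING ⚠️ Skipping {file} unzip as destination directory {path} is not empty.')
        return path
    with ZipFile(file) as z:
        z.extractall(path)
    return path
```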
@@ -185,22 +185,25 @@ def check_disk_space(url='https://ultralytics.com/assets/coco128.zip', sf=1.5, hard=True):
     Returns:
         (bool): True if there is sufficient disk space, False otherwise.
     """
-    with contextlib.suppress(Exception):
-        gib = 1 << 30  # bytes per GiB
-        data = int(requests.head(url).headers['Content-Length']) / gib  # file size (GB)
-        total, used, free = (x / gib for x in shutil.disk_usage('/'))  # bytes
-        if data * sf < free:
-            return True  # sufficient space
+    r = requests.head(url)  # response

-        # Insufficient space
-        text = (f'WARNING ⚠️ Insufficient free disk space {free:.1f} GB < {data * sf:.3f} GB required, '
-                f'Please free {data * sf - free:.1f} GB additional disk space and try again.')
-        if hard:
-            raise MemoryError(text)
-        LOGGER.warning(text)
-        return False
+    # Check response
+    assert r.status_code < 400, f'URL error for {url}: {r.status_code} {r.reason}'

-    return True
+    # Check file size
+    gib = 1 << 30  # bytes per GiB
+    data = int(r.headers.get('Content-Length', 0)) / gib  # file size (GB)
+    total, used, free = (x / gib for x in shutil.disk_usage('/'))  # bytes
+    if data * sf < free:
+        return True  # sufficient space
+
+    # Insufficient space
+    text = (f'WARNING ⚠️ Insufficient free disk space {free:.1f} GB < {data * sf:.3f} GB required, '
+            f'Please free {data * sf - free:.1f} GB additional disk space and try again.')
+    if hard:
+        raise MemoryError(text)
+    LOGGER.warning(text)
+    return False


 def get_google_drive_file_info(link):
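Net effect of the rewrite: instead of wrapping everything in `contextlib.suppress(Exception)` and silently falling back to `True`, `check_disk_space` now checks the HEAD response status explicitly and reads `Content-Length` with a default of 0, so URL failures surface as an assertion rather than being swallowed. A hedged usage sketch (not from the commit):

```python
# How a caller sees the reworked behaviour: hard=True raises MemoryError when space
# is insufficient, hard=False logs a warning and returns False; a bad URL now fails
# the status-code assertion instead of being silently ignored.
from ultralytics.utils.downloads import check_disk_space

if check_disk_space('https://ultralytics.com/assets/coco128.zip', sf=1.5, hard=False):
    print('Enough free disk space to download and unpack the asset.')
```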