ultralytics 8.0.239 Ultralytics Actions and hub-sdk adoption (#7431)

Signed-off-by: Glenn Jocher <glenn.jocher@ultralytics.com>
Co-authored-by: UltralyticsAssistant <web@ultralytics.com>
Co-authored-by: Burhan <62214284+Burhan-Q@users.noreply.github.com>
Co-authored-by: Kayzwer <68285002+Kayzwer@users.noreply.github.com>
Glenn Jocher 2024-01-10 03:16:08 +01:00 committed by GitHub
parent e795277391
commit fe27db2f6e
139 changed files with 6870 additions and 5125 deletions


@@ -21,10 +21,10 @@ def login(api_key: str = None, save=True) -> bool:
Returns:
bool: True if authentication is successful, False otherwise.
"""
api_key_url = f'{HUB_WEB_ROOT}/settings?tab=api+keys' # Set the redirect URL
saved_key = SETTINGS.get('api_key')
api_key_url = f"{HUB_WEB_ROOT}/settings?tab=api+keys" # set the redirect URL
saved_key = SETTINGS.get("api_key")
active_key = api_key or saved_key
credentials = {'api_key': active_key} if active_key and active_key != '' else None # Set credentials
credentials = {"api_key": active_key} if active_key and active_key != "" else None # set credentials
client = HUBClient(credentials) # initialize HUBClient
@@ -32,17 +32,18 @@ def login(api_key: str = None, save=True) -> bool:
# Successfully authenticated with HUB
if save and client.api_key != saved_key:
SETTINGS.update({'api_key': client.api_key}) # update settings with valid API key
SETTINGS.update({"api_key": client.api_key}) # update settings with valid API key
# Set message based on whether key was provided or retrieved from settings
log_message = ('New authentication successful ✅'
if client.api_key == api_key or not credentials else 'Authenticated ✅')
LOGGER.info(f'{PREFIX}{log_message}')
log_message = (
"New authentication successful ✅" if client.api_key == api_key or not credentials else "Authenticated ✅"
)
LOGGER.info(f"{PREFIX}{log_message}")
return True
else:
# Failed to authenticate with HUB
LOGGER.info(f'{PREFIX}Retrieve API key from {api_key_url}')
LOGGER.info(f"{PREFIX}Retrieve API key from {api_key_url}")
return False
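The updated `login()` resolves the active key from the argument or the saved `SETTINGS` entry, authenticates through `HUBClient`, and optionally persists the key. A minimal usage sketch (the key value below is a placeholder, not a real credential):

```python
from ultralytics import hub

hub.login(api_key="YOUR_API_KEY", save=True)  # placeholder key; saved to SETTINGS on success
hub.login()  # or reuse the previously saved key from SETTINGS
```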
@@ -57,50 +58,50 @@ def logout():
hub.logout()
```
"""
SETTINGS['api_key'] = ''
SETTINGS["api_key"] = ""
SETTINGS.save()
LOGGER.info(f"{PREFIX}logged out ✅. To log in again, use 'yolo hub login'.")
def reset_model(model_id=''):
def reset_model(model_id=""):
"""Reset a trained model to an untrained state."""
r = requests.post(f'{HUB_API_ROOT}/model-reset', json={'modelId': model_id}, headers={'x-api-key': Auth().api_key})
r = requests.post(f"{HUB_API_ROOT}/model-reset", json={"modelId": model_id}, headers={"x-api-key": Auth().api_key})
if r.status_code == 200:
LOGGER.info(f'{PREFIX}Model reset successfully')
LOGGER.info(f"{PREFIX}Model reset successfully")
return
LOGGER.warning(f'{PREFIX}Model reset failure {r.status_code} {r.reason}')
LOGGER.warning(f"{PREFIX}Model reset failure {r.status_code} {r.reason}")
def export_fmts_hub():
"""Returns a list of HUB-supported export formats."""
from ultralytics.engine.exporter import export_formats
return list(export_formats()['Argument'][1:]) + ['ultralytics_tflite', 'ultralytics_coreml']
return list(export_formats()["Argument"][1:]) + ["ultralytics_tflite", "ultralytics_coreml"]
def export_model(model_id='', format='torchscript'):
def export_model(model_id="", format="torchscript"):
"""Export a model to all formats."""
assert format in export_fmts_hub(), f"Unsupported export format '{format}', valid formats are {export_fmts_hub()}"
r = requests.post(f'{HUB_API_ROOT}/v1/models/{model_id}/export',
json={'format': format},
headers={'x-api-key': Auth().api_key})
assert r.status_code == 200, f'{PREFIX}{format} export failure {r.status_code} {r.reason}'
LOGGER.info(f'{PREFIX}{format} export started ✅')
r = requests.post(
f"{HUB_API_ROOT}/v1/models/{model_id}/export", json={"format": format}, headers={"x-api-key": Auth().api_key}
)
assert r.status_code == 200, f"{PREFIX}{format} export failure {r.status_code} {r.reason}"
LOGGER.info(f"{PREFIX}{format} export started ✅")
def get_export(model_id='', format='torchscript'):
def get_export(model_id="", format="torchscript"):
"""Get an exported model dictionary with download URL."""
assert format in export_fmts_hub(), f"Unsupported export format '{format}', valid formats are {export_fmts_hub()}"
r = requests.post(f'{HUB_API_ROOT}/get-export',
json={
'apiKey': Auth().api_key,
'modelId': model_id,
'format': format},
headers={'x-api-key': Auth().api_key})
assert r.status_code == 200, f'{PREFIX}{format} get_export failure {r.status_code} {r.reason}'
r = requests.post(
f"{HUB_API_ROOT}/get-export",
json={"apiKey": Auth().api_key, "modelId": model_id, "format": format},
headers={"x-api-key": Auth().api_key},
)
assert r.status_code == 200, f"{PREFIX}{format} get_export failure {r.status_code} {r.reason}"
return r.json()
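Together, the export helpers validate the requested format against `export_fmts_hub()`, start the export, and fetch the download descriptor. A hedged sketch of the three-call flow (the model ID is a placeholder):

```python
from ultralytics import hub

print(hub.export_fmts_hub())  # list HUB-supported export formats
hub.export_model(model_id="MODEL_ID", format="onnx")  # start an export (placeholder ID)
exported = hub.get_export(model_id="MODEL_ID", format="onnx")  # dict including a download URL
```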
def check_dataset(path='', task='detect'):
def check_dataset(path="", task="detect"):
"""
Function for error-checking HUB dataset Zip file before upload. It checks a dataset for errors before it is uploaded
to the HUB. Usage examples are given below.
@@ -119,4 +120,4 @@ def check_dataset(path='', task='detect'):
```
"""
HUBDatasetStats(path=path, task=task).get_json()
LOGGER.info(f'Checks completed correctly ✅. Upload this dataset to {HUB_WEB_ROOT}/datasets/.')
LOGGER.info(f"Checks completed correctly ✅. Upload this dataset to {HUB_WEB_ROOT}/datasets/.")
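`check_dataset()` runs `HUBDatasetStats` over a local dataset Zip so errors surface before upload. A minimal sketch with a placeholder path:

```python
from ultralytics import hub

hub.check_dataset("path/to/dataset.zip", task="detect")  # placeholder path; logs a success message when checks pass
```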


@@ -6,7 +6,7 @@ from hub_sdk import HUB_API_ROOT, HUB_WEB_ROOT
from ultralytics.hub.utils import PREFIX, request_with_credentials
from ultralytics.utils import LOGGER, SETTINGS, emojis, is_colab
API_KEY_URL = f'{HUB_WEB_ROOT}/settings?tab=api+keys'
API_KEY_URL = f"{HUB_WEB_ROOT}/settings?tab=api+keys"
class Auth:
@@ -23,9 +23,10 @@ class Auth:
api_key (str or bool): API key for authentication, initialized as False.
model_key (bool): Placeholder for model key, initialized as False.
"""
id_token = api_key = model_key = False
def __init__(self, api_key='', verbose=False):
def __init__(self, api_key="", verbose=False):
"""
Initialize the Auth class with an optional API key.
@@ -33,18 +34,18 @@ class Auth:
api_key (str, optional): May be an API key or a combination API key and model ID, i.e. key_id
"""
# Split the input API key in case it contains a combined key_model and keep only the API key part
api_key = api_key.split('_')[0]
api_key = api_key.split("_")[0]
# Set API key attribute as value passed or SETTINGS API key if none passed
self.api_key = api_key or SETTINGS.get('api_key', '')
self.api_key = api_key or SETTINGS.get("api_key", "")
# If an API key is provided
if self.api_key:
# If the provided API key matches the API key in the SETTINGS
if self.api_key == SETTINGS.get('api_key'):
if self.api_key == SETTINGS.get("api_key"):
# Log that the user is already logged in
if verbose:
LOGGER.info(f'{PREFIX}Authenticated ✅')
LOGGER.info(f"{PREFIX}Authenticated ✅")
return
else:
# Attempt to authenticate with the provided API key
@@ -59,12 +60,12 @@ class Auth:
# Update SETTINGS with the new API key after successful authentication
if success:
SETTINGS.update({'api_key': self.api_key})
SETTINGS.update({"api_key": self.api_key})
# Log that the new login was successful
if verbose:
LOGGER.info(f'{PREFIX}New authentication successful ✅')
LOGGER.info(f"{PREFIX}New authentication successful ✅")
elif verbose:
LOGGER.info(f'{PREFIX}Retrieve API key from {API_KEY_URL}')
LOGGER.info(f"{PREFIX}Retrieve API key from {API_KEY_URL}")
def request_api_key(self, max_attempts=3):
"""
@@ -73,13 +74,14 @@ class Auth:
Returns the model ID.
"""
import getpass
for attempts in range(max_attempts):
LOGGER.info(f'{PREFIX}Login. Attempt {attempts + 1} of {max_attempts}')
input_key = getpass.getpass(f'Enter API key from {API_KEY_URL} ')
self.api_key = input_key.split('_')[0] # remove model id if present
LOGGER.info(f"{PREFIX}Login. Attempt {attempts + 1} of {max_attempts}")
input_key = getpass.getpass(f"Enter API key from {API_KEY_URL} ")
self.api_key = input_key.split("_")[0] # remove model id if present
if self.authenticate():
return True
raise ConnectionError(emojis(f'{PREFIX}Failed to authenticate ❌'))
raise ConnectionError(emojis(f"{PREFIX}Failed to authenticate ❌"))
def authenticate(self) -> bool:
"""
@@ -90,14 +92,14 @@ class Auth:
"""
try:
if header := self.get_auth_header():
r = requests.post(f'{HUB_API_ROOT}/v1/auth', headers=header)
if not r.json().get('success', False):
raise ConnectionError('Unable to authenticate.')
r = requests.post(f"{HUB_API_ROOT}/v1/auth", headers=header)
if not r.json().get("success", False):
raise ConnectionError("Unable to authenticate.")
return True
raise ConnectionError('User has not authenticated locally.')
raise ConnectionError("User has not authenticated locally.")
except ConnectionError:
self.id_token = self.api_key = False # reset invalid
LOGGER.warning(f'{PREFIX}Invalid API key ⚠️')
LOGGER.warning(f"{PREFIX}Invalid API key ⚠️")
return False
def auth_with_cookies(self) -> bool:
@@ -111,12 +113,12 @@ class Auth:
if not is_colab():
return False # Currently only works with Colab
try:
authn = request_with_credentials(f'{HUB_API_ROOT}/v1/auth/auto')
if authn.get('success', False):
self.id_token = authn.get('data', {}).get('idToken', None)
authn = request_with_credentials(f"{HUB_API_ROOT}/v1/auth/auto")
if authn.get("success", False):
self.id_token = authn.get("data", {}).get("idToken", None)
self.authenticate()
return True
raise ConnectionError('Unable to fetch browser authentication details.')
raise ConnectionError("Unable to fetch browser authentication details.")
except ConnectionError:
self.id_token = False # reset invalid
return False
@@ -129,7 +131,7 @@ class Auth:
(dict): The authentication header if id_token or API key is set, None otherwise.
"""
if self.id_token:
return {'authorization': f'Bearer {self.id_token}'}
return {"authorization": f"Bearer {self.id_token}"}
elif self.api_key:
return {'x-api-key': self.api_key}
return {"x-api-key": self.api_key}
# else returns None
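The `Auth` class above tries a supplied or saved API key first and, in Colab only, falls back to browser cookies; downstream code then reads the result through `get_auth_header()`. A hedged sketch of that consumption, assuming the class is importable from `ultralytics.hub.auth` as in the current package layout (the key is a placeholder):

```python
from ultralytics.hub.auth import Auth

auth = Auth(api_key="YOUR_API_KEY", verbose=True)  # placeholder key; an empty string falls back to SETTINGS
header = auth.get_auth_header()  # {"authorization": "Bearer <id_token>"}, {"x-api-key": "<key>"}, or None
```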


@@ -12,16 +12,13 @@ from ultralytics.hub.utils import HELP_MSG, PREFIX, TQDM
from ultralytics.utils import LOGGER, SETTINGS, __version__, checks, emojis, is_colab
from ultralytics.utils.errors import HUBModelError
AGENT_NAME = (f'python-{__version__}-colab' if is_colab() else f'python-{__version__}-local')
AGENT_NAME = f"python-{__version__}-colab" if is_colab() else f"python-{__version__}-local"
class HUBTrainingSession:
"""
HUB training session for Ultralytics HUB YOLO models. Handles model initialization, heartbeats, and checkpointing.
Args:
url (str): Model identifier used to initialize the HUB training session.
Attributes:
agent_id (str): Identifier for the instance communicating with the server.
model_id (str): Identifier for the YOLO model being trained.
@@ -40,17 +37,18 @@ class HUBTrainingSession:
Initialize the HUBTrainingSession with the provided model identifier.
Args:
url (str): Model identifier used to initialize the HUB training session.
It can be a URL string or a model key with specific format.
identifier (str): Model identifier used to initialize the HUB training session.
It can be a URL string or a model key with specific format.
Raises:
ValueError: If the provided model identifier is invalid.
ConnectionError: If connecting with global API key is not supported.
"""
self.rate_limits = {
'metrics': 3.0,
'ckpt': 900.0,
'heartbeat': 300.0, } # rate limits (seconds)
"metrics": 3.0,
"ckpt": 900.0,
"heartbeat": 300.0,
} # rate limits (seconds)
self.metrics_queue = {} # holds metrics for each epoch until upload
self.timers = {} # holds timers in ultralytics/utils/callbacks/hub.py
@@ -58,8 +56,8 @@ class HUBTrainingSession:
api_key, model_id, self.filename = self._parse_identifier(identifier)
# Get credentials
active_key = api_key or SETTINGS.get('api_key')
credentials = {'api_key': active_key} if active_key else None # set credentials
active_key = api_key or SETTINGS.get("api_key")
credentials = {"api_key": active_key} if active_key else None # set credentials
# Initialize client
self.client = HUBClient(credentials)
@@ -72,35 +70,37 @@ class HUBTrainingSession:
def load_model(self, model_id):
# Initialize model
self.model = self.client.model(model_id)
self.model_url = f'{HUB_WEB_ROOT}/models/{self.model.id}'
self.model_url = f"{HUB_WEB_ROOT}/models/{self.model.id}"
self._set_train_args()
# Start heartbeats for HUB to monitor agent
self.model.start_heartbeat(self.rate_limits['heartbeat'])
LOGGER.info(f'{PREFIX}View model at {self.model_url} 🚀')
self.model.start_heartbeat(self.rate_limits["heartbeat"])
LOGGER.info(f"{PREFIX}View model at {self.model_url} 🚀")
def create_model(self, model_args):
# Initialize model
payload = {
'config': {
'batchSize': model_args.get('batch', -1),
'epochs': model_args.get('epochs', 300),
'imageSize': model_args.get('imgsz', 640),
'patience': model_args.get('patience', 100),
'device': model_args.get('device', ''),
'cache': model_args.get('cache', 'ram'), },
'dataset': {
'name': model_args.get('data')},
'lineage': {
'architecture': {
'name': self.filename.replace('.pt', '').replace('.yaml', ''), },
'parent': {}, },
'meta': {
'name': self.filename}, }
"config": {
"batchSize": model_args.get("batch", -1),
"epochs": model_args.get("epochs", 300),
"imageSize": model_args.get("imgsz", 640),
"patience": model_args.get("patience", 100),
"device": model_args.get("device", ""),
"cache": model_args.get("cache", "ram"),
},
"dataset": {"name": model_args.get("data")},
"lineage": {
"architecture": {
"name": self.filename.replace(".pt", "").replace(".yaml", ""),
},
"parent": {},
},
"meta": {"name": self.filename},
}
if self.filename.endswith('.pt'):
payload['lineage']['parent']['name'] = self.filename
if self.filename.endswith(".pt"):
payload["lineage"]["parent"]["name"] = self.filename
self.model.create_model(payload)
@@ -109,12 +109,12 @@ class HUBTrainingSession:
if not self.model.id:
return
self.model_url = f'{HUB_WEB_ROOT}/models/{self.model.id}'
self.model_url = f"{HUB_WEB_ROOT}/models/{self.model.id}"
# Start heartbeats for HUB to monitor agent
self.model.start_heartbeat(self.rate_limits['heartbeat'])
self.model.start_heartbeat(self.rate_limits["heartbeat"])
LOGGER.info(f'{PREFIX}View model at {self.model_url} 🚀')
LOGGER.info(f"{PREFIX}View model at {self.model_url} 🚀")
def _parse_identifier(self, identifier):
"""
@@ -125,13 +125,13 @@ class HUBTrainingSession:
- An identifier containing an API key and a model ID separated by an underscore
- An identifier that is solely a model ID of a fixed length
- A local filename that ends with '.pt' or '.yaml'
Args:
identifier (str): The identifier string to be parsed.
Returns:
(tuple): A tuple containing the API key, model ID, and filename as applicable.
Raises:
HUBModelError: If the identifier format is not recognized.
"""
@@ -140,12 +140,12 @@ class HUBTrainingSession:
api_key, model_id, filename = None, None, None
# Check if identifier is a HUB URL
if identifier.startswith(f'{HUB_WEB_ROOT}/models/'):
if identifier.startswith(f"{HUB_WEB_ROOT}/models/"):
# Extract the model_id after the HUB_WEB_ROOT URL
model_id = identifier.split(f'{HUB_WEB_ROOT}/models/')[-1]
model_id = identifier.split(f"{HUB_WEB_ROOT}/models/")[-1]
else:
# Split the identifier based on underscores only if it's not a HUB URL
parts = identifier.split('_')
parts = identifier.split("_")
# Check if identifier is in the format of API key and model ID
if len(parts) == 2 and len(parts[0]) == 42 and len(parts[1]) == 20:
@@ -154,43 +154,46 @@ class HUBTrainingSession:
elif len(parts) == 1 and len(parts[0]) == 20:
model_id = parts[0]
# Check if identifier is a local filename
elif identifier.endswith('.pt') or identifier.endswith('.yaml'):
elif identifier.endswith(".pt") or identifier.endswith(".yaml"):
filename = identifier
else:
raise HUBModelError(
f"model='{identifier}' could not be parsed. Check format is correct. "
f'Supported formats are Ultralytics HUB URL, apiKey_modelId, modelId, local pt or yaml file.')
f"Supported formats are Ultralytics HUB URL, apiKey_modelId, modelId, local pt or yaml file."
)
return api_key, model_id, filename
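For reference, the accepted identifier shapes map onto the returned tuple as follows; the values in this sketch are placeholder strings of the required lengths, and `session` is assumed to be an existing `HUBTrainingSession`:

```python
# Illustrative only: placeholder keys/IDs, not real credentials
_, model_id, _ = session._parse_identifier(f"{HUB_WEB_ROOT}/models/" + "m" * 20)  # HUB model URL
api_key, model_id, _ = session._parse_identifier("k" * 42 + "_" + "m" * 20)       # apiKey_modelId pair
_, model_id, _ = session._parse_identifier("m" * 20)                              # bare 20-char model ID
_, _, filename = session._parse_identifier("yolov8n.yaml")                        # local *.pt / *.yaml file
```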
def _set_train_args(self, **kwargs):
if self.model.is_trained():
# Model is already trained
raise ValueError(emojis(f'Model is already trained and uploaded to {self.model_url} 🚀'))
raise ValueError(emojis(f"Model is already trained and uploaded to {self.model_url} 🚀"))
if self.model.is_resumable():
# Model has saved weights
self.train_args = {'data': self.model.get_dataset_url(), 'resume': True}
self.model_file = self.model.get_weights_url('last')
self.train_args = {"data": self.model.get_dataset_url(), "resume": True}
self.model_file = self.model.get_weights_url("last")
else:
# Model has no saved weights
def get_train_args(config):
return {
'batch': config['batchSize'],
'epochs': config['epochs'],
'imgsz': config['imageSize'],
'patience': config['patience'],
'device': config['device'],
'cache': config['cache'],
'data': self.model.get_dataset_url(), }
"batch": config["batchSize"],
"epochs": config["epochs"],
"imgsz": config["imageSize"],
"patience": config["patience"],
"device": config["device"],
"cache": config["cache"],
"data": self.model.get_dataset_url(),
}
self.train_args = get_train_args(self.model.data.get('config'))
self.train_args = get_train_args(self.model.data.get("config"))
# Set the model file as either a *.pt or *.yaml file
self.model_file = (self.model.get_weights_url('parent')
if self.model.is_pretrained() else self.model.get_architecture())
self.model_file = (
self.model.get_weights_url("parent") if self.model.is_pretrained() else self.model.get_architecture()
)
if not self.train_args.get('data'):
raise ValueError('Dataset may still be processing. Please wait a minute and try again.') # RF fix
if not self.train_args.get("data"):
raise ValueError("Dataset may still be processing. Please wait a minute and try again.") # RF fix
self.model_file = checks.check_yolov5u_filename(self.model_file, verbose=False) # YOLOv5->YOLOv5u
self.model_id = self.model.id
@@ -206,12 +209,11 @@ class HUBTrainingSession:
*args,
**kwargs,
):
def retry_request():
t0 = time.time() # Record the start time for the timeout
for i in range(retry + 1):
if (time.time() - t0) > timeout:
LOGGER.warning(f'{PREFIX}Timeout for request reached. {HELP_MSG}')
LOGGER.warning(f"{PREFIX}Timeout for request reached. {HELP_MSG}")
break # Timeout reached, exit loop
response = request_func(*args, **kwargs)
@@ -219,8 +221,8 @@ class HUBTrainingSession:
self._show_upload_progress(progress_total, response)
if response is None:
LOGGER.warning(f'{PREFIX}Received no response from the request. {HELP_MSG}')
time.sleep(2 ** i) # Exponential backoff before retrying
LOGGER.warning(f"{PREFIX}Received no response from the request. {HELP_MSG}")
time.sleep(2**i) # Exponential backoff before retrying
continue # Skip further processing and retry
if HTTPStatus.OK <= response.status_code < HTTPStatus.MULTIPLE_CHOICES:
@@ -231,13 +233,13 @@ class HUBTrainingSession:
message = self._get_failure_message(response, retry, timeout)
if verbose:
LOGGER.warning(f'{PREFIX}{message} {HELP_MSG} ({response.status_code})')
LOGGER.warning(f"{PREFIX}{message} {HELP_MSG} ({response.status_code})")
if not self._should_retry(response.status_code):
LOGGER.warning(f'{PREFIX}Request failed. {HELP_MSG} ({response.status_code}')
LOGGER.warning(f"{PREFIX}Request failed. {HELP_MSG} ({response.status_code}")
break # Not an error that should be retried, exit loop
time.sleep(2 ** i) # Exponential backoff for retries
time.sleep(2**i) # Exponential backoff for retries
return response
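`retry_request()` retries with exponential backoff, sleeping `2**i` seconds after attempt `i`, until either the retry count or the `timeout` budget is exhausted. A standalone sketch of that pattern, independent of the HUB client (the function name and arguments here are illustrative):

```python
import time


def retry_with_backoff(request_func, retry=3, timeout=30):
    """Illustrative sketch: call request_func() until success, retries, or the timeout budget run out."""
    t0 = time.time()  # start of the timeout budget
    response = None
    for i in range(retry + 1):
        if (time.time() - t0) > timeout:
            break  # timeout budget exhausted, stop retrying
        response = request_func()
        if response is not None and 200 <= response.status_code < 300:
            return response  # 2xx response, no retry needed
        time.sleep(2**i)  # exponential backoff: 1 s, 2 s, 4 s, ...
    return response
```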
@@ -253,7 +255,8 @@ class HUBTrainingSession:
retry_codes = {
HTTPStatus.REQUEST_TIMEOUT,
HTTPStatus.BAD_GATEWAY,
HTTPStatus.GATEWAY_TIMEOUT, }
HTTPStatus.GATEWAY_TIMEOUT,
}
return True if status_code in retry_codes else False
def _get_failure_message(self, response: requests.Response, retry: int, timeout: int):
@@ -269,16 +272,18 @@ class HUBTrainingSession:
str: The retry message.
"""
if self._should_retry(response.status_code):
return f'Retrying {retry}x for {timeout}s.' if retry else ''
return f"Retrying {retry}x for {timeout}s." if retry else ""
elif response.status_code == HTTPStatus.TOO_MANY_REQUESTS: # rate limit
headers = response.headers
return (f"Rate limit reached ({headers['X-RateLimit-Remaining']}/{headers['X-RateLimit-Limit']}). "
f"Please retry after {headers['Retry-After']}s.")
return (
f"Rate limit reached ({headers['X-RateLimit-Remaining']}/{headers['X-RateLimit-Limit']}). "
f"Please retry after {headers['Retry-After']}s."
)
else:
try:
return response.json().get('message', 'No JSON message.')
return response.json().get("message", "No JSON message.")
except AttributeError:
return 'Unable to read JSON.'
return "Unable to read JSON."
def upload_metrics(self):
"""Upload model metrics to Ultralytics HUB."""
@@ -303,7 +308,7 @@ class HUBTrainingSession:
final (bool): Indicates if the model is the final model after training.
"""
if Path(weights).is_file():
progress_total = (Path(weights).stat().st_size if final else None) # Only show progress if final
progress_total = Path(weights).stat().st_size if final else None # Only show progress if final
self.request_queue(
self.model.upload_model,
epoch=epoch,
@@ -317,7 +322,7 @@ class HUBTrainingSession:
progress_total=progress_total,
)
else:
LOGGER.warning(f'{PREFIX}WARNING ⚠️ Model upload issue. Missing model {weights}.')
LOGGER.warning(f"{PREFIX}WARNING ⚠️ Model upload issue. Missing model {weights}.")
def _show_upload_progress(self, content_length: int, response: requests.Response) -> None:
"""
@@ -330,6 +335,6 @@ class HUBTrainingSession:
Returns:
(None)
"""
with TQDM(total=content_length, unit='B', unit_scale=True, unit_divisor=1024) as pbar:
with TQDM(total=content_length, unit="B", unit_scale=True, unit_divisor=1024) as pbar:
for data in response.iter_content(chunk_size=1024):
pbar.update(len(data))


@@ -9,12 +9,26 @@ from pathlib import Path
import requests
from ultralytics.utils import (ENVIRONMENT, LOGGER, ONLINE, RANK, SETTINGS, TESTS_RUNNING, TQDM, TryExcept, __version__,
colorstr, get_git_origin_url, is_colab, is_git_dir, is_pip_package)
from ultralytics.utils import (
ENVIRONMENT,
LOGGER,
ONLINE,
RANK,
SETTINGS,
TESTS_RUNNING,
TQDM,
TryExcept,
__version__,
colorstr,
get_git_origin_url,
is_colab,
is_git_dir,
is_pip_package,
)
from ultralytics.utils.downloads import GITHUB_ASSETS_NAMES
PREFIX = colorstr('Ultralytics HUB: ')
HELP_MSG = 'If this issue persists please visit https://github.com/ultralytics/hub/issues for assistance.'
PREFIX = colorstr("Ultralytics HUB: ")
HELP_MSG = "If this issue persists please visit https://github.com/ultralytics/hub/issues for assistance."
def request_with_credentials(url: str) -> any:
@@ -31,11 +45,13 @@ def request_with_credentials(url: str) -> any:
OSError: If the function is not run in a Google Colab environment.
"""
if not is_colab():
raise OSError('request_with_credentials() must run in a Colab environment')
raise OSError("request_with_credentials() must run in a Colab environment")
from google.colab import output # noqa
from IPython import display # noqa
display.display(
display.Javascript("""
display.Javascript(
"""
window._hub_tmp = new Promise((resolve, reject) => {
const timeout = setTimeout(() => reject("Failed authenticating existing browser session"), 5000)
fetch("%s", {
@@ -50,8 +66,11 @@ def request_with_credentials(url: str) -> any:
reject(err);
});
});
""" % url))
return output.eval_js('_hub_tmp')
"""
% url
)
)
return output.eval_js("_hub_tmp")
def requests_with_progress(method, url, **kwargs):
@@ -71,13 +90,13 @@ def requests_with_progress(method, url, **kwargs):
content length.
- If 'progress' is a number then progress bar will display assuming content length = progress.
"""
progress = kwargs.pop('progress', False)
progress = kwargs.pop("progress", False)
if not progress:
return requests.request(method, url, **kwargs)
response = requests.request(method, url, stream=True, **kwargs)
total = int(response.headers.get('content-length', 0) if isinstance(progress, bool) else progress) # total size
total = int(response.headers.get("content-length", 0) if isinstance(progress, bool) else progress) # total size
try:
pbar = TQDM(total=total, unit='B', unit_scale=True, unit_divisor=1024)
pbar = TQDM(total=total, unit="B", unit_scale=True, unit_divisor=1024)
for data in response.iter_content(chunk_size=1024):
pbar.update(len(data))
pbar.close()
@@ -118,25 +137,27 @@ def smart_request(method, url, retry=3, timeout=30, thread=True, code=-1, verbos
if r.status_code < 300: # return codes in the 2xx range are generally considered "good" or "successful"
break
try:
m = r.json().get('message', 'No JSON message.')
m = r.json().get("message", "No JSON message.")
except AttributeError:
m = 'Unable to read JSON.'
m = "Unable to read JSON."
if i == 0:
if r.status_code in retry_codes:
m += f' Retrying {retry}x for {timeout}s.' if retry else ''
m += f" Retrying {retry}x for {timeout}s." if retry else ""
elif r.status_code == 429: # rate limit
h = r.headers # response headers
m = f"Rate limit reached ({h['X-RateLimit-Remaining']}/{h['X-RateLimit-Limit']}). " \
m = (
f"Rate limit reached ({h['X-RateLimit-Remaining']}/{h['X-RateLimit-Limit']}). "
f"Please retry after {h['Retry-After']}s."
)
if verbose:
LOGGER.warning(f'{PREFIX}{m} {HELP_MSG} ({r.status_code} #{code})')
LOGGER.warning(f"{PREFIX}{m} {HELP_MSG} ({r.status_code} #{code})")
if r.status_code not in retry_codes:
return r
time.sleep(2 ** i) # exponential standoff
time.sleep(2**i) # exponential standoff
return r
args = method, url
kwargs['progress'] = progress
kwargs["progress"] = progress
if thread:
threading.Thread(target=func, args=args, kwargs=kwargs, daemon=True).start()
else:
@@ -155,7 +176,7 @@ class Events:
enabled (bool): A flag to enable or disable Events based on certain conditions.
"""
url = 'https://www.google-analytics.com/mp/collect?measurement_id=G-X8NCJYTQXM&api_secret=QLQrATrNSwGRFRLE-cbHJw'
url = "https://www.google-analytics.com/mp/collect?measurement_id=G-X8NCJYTQXM&api_secret=QLQrATrNSwGRFRLE-cbHJw"
def __init__(self):
"""Initializes the Events object with default values for events, rate_limit, and metadata."""
@@ -163,19 +184,21 @@ class Events:
self.rate_limit = 60.0 # rate limit (seconds)
self.t = 0.0 # rate limit timer (seconds)
self.metadata = {
'cli': Path(sys.argv[0]).name == 'yolo',
'install': 'git' if is_git_dir() else 'pip' if is_pip_package() else 'other',
'python': '.'.join(platform.python_version_tuple()[:2]), # i.e. 3.10
'version': __version__,
'env': ENVIRONMENT,
'session_id': round(random.random() * 1E15),
'engagement_time_msec': 1000}
self.enabled = \
SETTINGS['sync'] and \
RANK in (-1, 0) and \
not TESTS_RUNNING and \
ONLINE and \
(is_pip_package() or get_git_origin_url() == 'https://github.com/ultralytics/ultralytics.git')
"cli": Path(sys.argv[0]).name == "yolo",
"install": "git" if is_git_dir() else "pip" if is_pip_package() else "other",
"python": ".".join(platform.python_version_tuple()[:2]), # i.e. 3.10
"version": __version__,
"env": ENVIRONMENT,
"session_id": round(random.random() * 1e15),
"engagement_time_msec": 1000,
}
self.enabled = (
SETTINGS["sync"]
and RANK in (-1, 0)
and not TESTS_RUNNING
and ONLINE
and (is_pip_package() or get_git_origin_url() == "https://github.com/ultralytics/ultralytics.git")
)
def __call__(self, cfg):
"""
@@ -191,11 +214,13 @@ class Events:
# Attempt to add to events
if len(self.events) < 25: # Events list limited to 25 events (drop any events past this)
params = {
**self.metadata, 'task': cfg.task,
'model': cfg.model if cfg.model in GITHUB_ASSETS_NAMES else 'custom'}
if cfg.mode == 'export':
params['format'] = cfg.format
self.events.append({'name': cfg.mode, 'params': params})
**self.metadata,
"task": cfg.task,
"model": cfg.model if cfg.model in GITHUB_ASSETS_NAMES else "custom",
}
if cfg.mode == "export":
params["format"] = cfg.format
self.events.append({"name": cfg.mode, "params": params})
# Check rate limit
t = time.time()
@@ -204,10 +229,10 @@ class Events:
return
# Time is over rate limiter, send now
data = {'client_id': SETTINGS['uuid'], 'events': self.events} # SHA-256 anonymized UUID hash and events list
data = {"client_id": SETTINGS["uuid"], "events": self.events} # SHA-256 anonymized UUID hash and events list
# POST equivalent to requests.post(self.url, json=data)
smart_request('post', self.url, json=data, retry=0, verbose=False)
smart_request("post", self.url, json=data, retry=0, verbose=False)
# Reset events and rate limit timer
self.events = []
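`Events` accumulates at most 25 anonymized events and flushes them through `smart_request` only once `rate_limit` seconds have passed since the last send. A self-contained sketch of that batch-and-rate-limit pattern (not the HUB implementation itself; names are illustrative):

```python
import time


class BatchedSender:
    """Illustrative batch-and-rate-limit sender, mirroring the Events flush logic."""

    def __init__(self, send, rate_limit=60.0, max_events=25):
        self.send = send  # callable that actually posts the payload
        self.events = []
        self.rate_limit = rate_limit  # seconds between flushes
        self.max_events = max_events  # events beyond this cap are dropped
        self.t = 0.0  # timestamp of the last flush

    def add(self, event):
        if len(self.events) < self.max_events:
            self.events.append(event)
        now = time.time()
        if (now - self.t) < self.rate_limit:
            return  # still inside the rate-limit window, keep accumulating
        self.send(self.events)  # flush the accumulated batch
        self.events = []
        self.t = now
```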