Apply ruff==0.9.0 formatting (#18624)

Co-authored-by: UltralyticsAssistant <web@ultralytics.com>
Glenn Jocher authored on 2025-01-10 17:27:22 +01:00, committed by GitHub
commit 34b339d033 (parent c196a82bfa)
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
12 changed files with 17 additions and 30 deletions


@@ -473,8 +473,7 @@ def check_dict_alignment(base: Dict, custom: Dict, e=None):
     """
     custom = _handle_deprecation(custom)
     base_keys, custom_keys = (set(x.keys()) for x in (base, custom))
-    mismatched = [k for k in custom_keys if k not in base_keys]
-    if mismatched:
+    if mismatched := [k for k in custom_keys if k not in base_keys]:
         from difflib import get_close_matches

         string = ""


@@ -60,8 +60,7 @@ def exif_size(img: Image.Image):
     s = img.size  # (width, height)
     if img.format == "JPEG":  # only support JPEG images
         try:
-            exif = img.getexif()
-            if exif:
+            if exif := img.getexif():
                 rotation = exif.get(274, None)  # the EXIF key for the orientation tag is 274
                 if rotation in {6, 8}:  # rotation 270 or 90
                     s = s[1], s[0]
@@ -125,8 +124,7 @@ def verify_image_label(args):
                     segments = [np.array(x[1:], dtype=np.float32).reshape(-1, 2) for x in lb]  # (cls, xy1...)
                     lb = np.concatenate((classes.reshape(-1, 1), segments2boxes(segments)), 1)  # (cls, xywh)
                 lb = np.array(lb, dtype=np.float32)
-            nl = len(lb)
-            if nl:
+            if nl := len(lb):
                 if keypoint:
                     assert lb.shape[1] == (5 + nkpt * ndim), f"labels require {(5 + nkpt * ndim)} columns each"
                     points = lb[:, 5:].reshape(-1, ndim)[:, :2]


@@ -652,12 +652,11 @@ class Results(SimpleClass):
         """
         log_string = ""
         probs = self.probs
-        boxes = self.boxes
         if len(self) == 0:
             return log_string if probs is not None else f"{log_string}(no detections), "
         if probs is not None:
             log_string += f"{', '.join(f'{self.names[j]} {probs.data[j]:.2f}' for j in probs.top5)}, "
-        if boxes:
+        if boxes := self.boxes:
             for c in boxes.cls.unique():
                 n = (boxes.cls == c).sum()  # detections per class
                 log_string += f"{n} {self.names[int(c)]}{'s' * (n > 1)}, "


@@ -106,7 +106,7 @@ class SAM(Model):
             ...     print(f"Detected {len(r.masks)} masks")
         """
         overrides = dict(conf=0.25, task="segment", mode="predict", imgsz=1024)
-        kwargs = {**overrides, **kwargs}
+        kwargs = overrides | kwargs
         prompts = dict(bboxes=bboxes, points=points, labels=labels)
         return super().predict(source, stream, prompts=prompts, **kwargs)

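
The SAM hunk above swaps dictionary unpacking for the PEP 584 dict union operator (Python >= 3.9). Both build a new dict, and the right-hand operand wins on key collisions, so user kwargs still override the defaults. A sketch with illustrative values, not taken from the diff:

overrides = {"conf": 0.25, "task": "segment"}
kwargs = {"conf": 0.5, "imgsz": 640}
# unpacking (before) and union (after) produce the same result
assert {**overrides, **kwargs} == overrides | kwargs == {"conf": 0.5, "task": "segment", "imgsz": 640}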


@@ -54,7 +54,7 @@ def select_closest_cond_frames(frame_idx, cond_frame_outputs, max_cond_frame_num
             (t for t in cond_frame_outputs if t not in selected_outputs),
             key=lambda x: abs(x - frame_idx),
         )[:num_remain]
-        selected_outputs.update((t, cond_frame_outputs[t]) for t in inds_remain)
+        selected_outputs |= ((t, cond_frame_outputs[t]) for t in inds_remain)
         unselected_outputs = {t: v for t, v in cond_frame_outputs.items() if t not in selected_outputs}

     return selected_outputs, unselected_outputs
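
One subtlety in the hunk above: unlike the binary `|`, which requires a dict on both sides, the in-place `|=` accepts anything dict.update() accepts, including a generator of (key, value) pairs, so the rewrite is safe here. A sketch with illustrative values, not taken from the diff:

selected = {0: "a"}
selected |= ((t, str(t)) for t in (3, 5))  # works: |= follows update() semantics
assert selected == {0: "a", 3: "3", 5: "5"}
# selected | ((t, str(t)) for t in (7,))  # would raise TypeError: | needs a dict operand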


@@ -244,7 +244,7 @@ class DETRLoss(nn.Module):
             gt_scores[idx] = bbox_iou(pred_bboxes.detach(), gt_bboxes, xywh=True).squeeze(-1)

         loss = {}
-        loss.update(self._get_loss_class(pred_scores, targets, gt_scores, len(gt_bboxes), postfix))
+        loss |= self._get_loss_class(pred_scores, targets, gt_scores, len(gt_bboxes), postfix)
         loss.update(self._get_loss_bbox(pred_bboxes, gt_bboxes, postfix))
         # if masks is not None and gt_mask is not None:
         #     loss.update(self._get_loss_mask(masks, gt_mask, match_indices, postfix))


@@ -62,8 +62,7 @@ class Inference:
         self.selected_ind = []  # List of selected classes for detection or tracking
         self.model = None  # Container for the loaded model instance

-        self.temp_dict = {"model": None}  # Temporary dict to store the model path
-        self.temp_dict.update(kwargs)
+        self.temp_dict = {"model": None} | kwargs  # Temporary dict to store the model path
         self.model_path = None  # Store model file name with path
         if self.temp_dict["model"] is not None:
             self.model_path = self.temp_dict["model"]


@@ -1243,7 +1243,7 @@ class SettingsManager(JSONDict):
         """Updates settings, validating keys and types."""
         for arg in args:
             if isinstance(arg, dict):
-                kwargs.update(arg)
+                kwargs |= arg
         for k, v in kwargs.items():
             if k not in self.defaults:
                 raise KeyError(f"No Ultralytics setting '{k}'. {self.help_msg}")


@@ -15,16 +15,14 @@ def on_pretrain_routine_start(trainer):

 def on_pretrain_routine_end(trainer):
     """Logs info before starting timer for upload rate limit."""
-    session = getattr(trainer, "hub_session", None)
-    if session:
+    if session := getattr(trainer, "hub_session", None):
         # Start timer for upload rate limit
         session.timers = {"metrics": time(), "ckpt": time()}  # start timer on session.rate_limit


 def on_fit_epoch_end(trainer):
     """Uploads training progress metrics at the end of each epoch."""
-    session = getattr(trainer, "hub_session", None)
-    if session:
+    if session := getattr(trainer, "hub_session", None):
         # Upload metrics after val end
         all_plots = {
             **trainer.label_loss_items(trainer.tloss, prefix="train"),
@@ -49,8 +47,7 @@ def on_fit_epoch_end(trainer):

 def on_model_save(trainer):
     """Saves checkpoints to Ultralytics HUB with rate limiting."""
-    session = getattr(trainer, "hub_session", None)
-    if session:
+    if session := getattr(trainer, "hub_session", None):
         # Upload checkpoints with rate limiting
         is_best = trainer.best_fitness == trainer.fitness
         if time() - session.timers["ckpt"] > session.rate_limits["ckpt"]:
@@ -61,8 +58,7 @@ def on_model_save(trainer):

 def on_train_end(trainer):
     """Upload final model and metrics to Ultralytics HUB at the end of training."""
-    session = getattr(trainer, "hub_session", None)
-    if session:
+    if session := getattr(trainer, "hub_session", None):
         # Upload final model and metrics with exponential standoff
         LOGGER.info(f"{PREFIX}Syncing final model...")
         session.upload_model(


@@ -75,8 +75,7 @@ def parse_requirements(file_path=ROOT.parent / "requirements.txt", package=""):
         line = line.strip()
         if line and not line.startswith("#"):
             line = line.split("#")[0].strip()  # ignore inline comments
-            match = re.match(r"([a-zA-Z0-9-_]+)\s*([<>!=~]+.*)?", line)
-            if match:
+            if match := re.match(r"([a-zA-Z0-9-_]+)\s*([<>!=~]+.*)?", line):
                 requirements.append(SimpleNamespace(name=match[1], specifier=match[2].strip() if match[2] else ""))

     return requirements


@@ -269,8 +269,7 @@ def get_google_drive_file_info(link):
         for k, v in response.cookies.items():
             if k.startswith("download_warning"):
                 drive_url += f"&confirm={v}"  # v is token
-        cd = response.headers.get("content-disposition")
-        if cd:
+        if cd := response.headers.get("content-disposition"):
             filename = re.findall('filename="(.+)"', cd)[0]

     return drive_url, filename


@@ -189,8 +189,7 @@ class v8DetectionLoss:
             out = torch.zeros(batch_size, counts.max(), ne - 1, device=self.device)
             for j in range(batch_size):
                 matches = i == j
-                n = matches.sum()
-                if n:
+                if n := matches.sum():
                     out[j, :n] = targets[matches, 1:]
             out[..., 1:5] = xywh2xyxy(out[..., 1:5].mul_(scale_tensor))
         return out
@@ -630,8 +629,7 @@ class v8OBBLoss(v8DetectionLoss):
             out = torch.zeros(batch_size, counts.max(), 6, device=self.device)
             for j in range(batch_size):
                 matches = i == j
-                n = matches.sum()
-                if n:
+                if n := matches.sum():
                     bboxes = targets[matches, 2:]
                     bboxes[..., :4].mul_(scale_tensor)
                     out[j, :n] = torch.cat([targets[matches, 1:2], bboxes], dim=-1)
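
The two loss hunks above apply the same walrus rewrite to a tensor: matches.sum() returns a 0-dim tensor, which is falsy exactly when it equals zero, so `if n := matches.sum():` preserves the old `if n:` behavior. A sketch with illustrative values, not taken from the diff:

import torch

matches = torch.tensor([True, False, True])
if n := matches.sum():  # n is tensor(2), which is truthy
    print(f"{int(n)} matches")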