Apply Ruff 0.9.0 (#18622)
Signed-off-by: Glenn Jocher <glenn.jocher@ultralytics.com>
Co-authored-by: UltralyticsAssistant <web@ultralytics.com>
parent cc1e77138c
commit 3902e740cf
22 changed files with 69 additions and 65 deletions
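
Nearly every hunk below is the same mechanical change applied by Ruff 0.9.0's formatter: the outer f-string quote becomes a double quote, and any string literals nested inside the replacement fields flip to single quotes, where the old code used single outer quotes to avoid clashing with double quotes inside the braces. A minimal sketch of the equivalence, using an illustrative `data` value that is not taken from this commit:

    # Both spellings build the identical string; only the quote characters move.
    data = "coco8.yaml"  # illustrative value, not from this commit

    old_style = f'Ultralytics model {f"trained on {data}" if data else ""}'
    new_style = f"Ultralytics model {f'trained on {data}' if data else ''}"

    assert old_style == new_style  # the reformat is purely cosmetic
    print(new_style)  # Ultralytics model trained on coco8.yaml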
@@ -357,7 +357,7 @@ class Exporter:
         )
         self.pretty_name = Path(self.model.yaml.get("yaml_file", self.file)).stem.replace("yolo", "YOLO")
         data = model.args["data"] if hasattr(model, "args") and isinstance(model.args, dict) else ""
-        description = f'Ultralytics {self.pretty_name} model {f"trained on {data}" if data else ""}'
+        description = f"Ultralytics {self.pretty_name} model {f'trained on {data}' if data else ''}"
         self.metadata = {
             "description": description,
             "author": "Ultralytics",
@@ -377,7 +377,7 @@ class Exporter:
 
         LOGGER.info(
             f"\n{colorstr('PyTorch:')} starting from '{file}' with input shape {tuple(im.shape)} BCHW and "
-            f'output shape(s) {self.output_shape} ({file_size(file):.1f} MB)'
+            f"output shape(s) {self.output_shape} ({file_size(file):.1f} MB)"
         )
 
         # Exports
@@ -427,11 +427,11 @@ class Exporter:
             predict_data = f"data={data}" if model.task == "segment" and fmt == "pb" else ""
             q = "int8" if self.args.int8 else "half" if self.args.half else ""  # quantization
             LOGGER.info(
-                f'\nExport complete ({time.time() - t:.1f}s)'
+                f"\nExport complete ({time.time() - t:.1f}s)"
                 f"\nResults saved to {colorstr('bold', file.parent.resolve())}"
-                f'\nPredict: yolo predict task={model.task} model={f} imgsz={imgsz} {q} {predict_data}'
-                f'\nValidate: yolo val task={model.task} model={f} imgsz={imgsz} data={data} {q} {s}'
-                f'\nVisualize: https://netron.app'
+                f"\nPredict: yolo predict task={model.task} model={f} imgsz={imgsz} {q} {predict_data}"
+                f"\nValidate: yolo val task={model.task} model={f} imgsz={imgsz} data={data} {q} {s}"
+                f"\nVisualize: https://netron.app"
             )
 
         self.run_callbacks("on_export_end")
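In LOGGER.info hunks like the one above, each source line is a separate (f-)string literal, and Python implicitly concatenates adjacent literals into one message; that is why the formatter can restyle individual lines without touching the surrounding call. A small runnable sketch of the pattern, with illustrative values and a plain print standing in for LOGGER.info:

    # Adjacent string literals are joined at compile time into a single string,
    # so each line's quote style can change independently of its neighbours.
    task, model_file, imgsz = "detect", "yolo11n.onnx", 640  # illustrative values

    message = (
        "\nExport complete (0.0s)"
        f"\nPredict: yolo predict task={task} model={model_file} imgsz={imgsz}"
        f"\nVisualize: https://netron.app"
    )
    print(message)  # prints one string spanning three lines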
@@ -680,16 +680,16 @@ class Exporter:
                 shutil.rmtree(unzip_dir)  # delete unzip dir
 
         ncnn_args = [
-            f'ncnnparam={f / "model.ncnn.param"}',
-            f'ncnnbin={f / "model.ncnn.bin"}',
-            f'ncnnpy={f / "model_ncnn.py"}',
+            f"ncnnparam={f / 'model.ncnn.param'}",
+            f"ncnnbin={f / 'model.ncnn.bin'}",
+            f"ncnnpy={f / 'model_ncnn.py'}",
         ]
 
         pnnx_args = [
-            f'pnnxparam={f / "model.pnnx.param"}',
-            f'pnnxbin={f / "model.pnnx.bin"}',
-            f'pnnxpy={f / "model_pnnx.py"}',
-            f'pnnxonnx={f / "model.pnnx.onnx"}',
+            f"pnnxparam={f / 'model.pnnx.param'}",
+            f"pnnxbin={f / 'model.pnnx.bin'}",
+            f"pnnxpy={f / 'model_pnnx.py'}",
+            f"pnnxonnx={f / 'model.pnnx.onnx'}",
         ]
 
         cmd = [
@@ -1139,7 +1139,9 @@ class Exporter:
     def export_imx(self, prefix=colorstr("IMX:")):
         """YOLO IMX export."""
         gptq = False
-        assert LINUX, "export only supported on Linux. See https://developer.aitrios.sony-semicon.com/en/raspberrypi-ai-camera/documentation/imx500-converter"
+        assert LINUX, (
+            "export only supported on Linux. See https://developer.aitrios.sony-semicon.com/en/raspberrypi-ai-camera/documentation/imx500-converter"
+        )
         if getattr(self.model, "end2end", False):
             raise ValueError("IMX export is not supported for end2end models.")
         if "C2f" not in self.model.__str__():
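The hunk above shows the other visible Ruff 0.9.0 change: an assert whose message would overflow the line length now has that message wrapped in parentheses so it can sit on its own line, with no change in behaviour. A minimal sketch of the style under assumed names (MIN_ITEMS and the message text are illustrative, not from this commit):

    # Parenthesizing the assert message only affects layout, not semantics.
    MIN_ITEMS = 1  # illustrative threshold, not from this commit

    def check_not_empty(items: list) -> None:
        """Assert that at least MIN_ITEMS elements were provided."""
        assert len(items) >= MIN_ITEMS, (
            f"expected at least {MIN_ITEMS} item(s), got {len(items)}; "
            "provide a non-empty list before calling this helper"
        )

    check_not_empty([1, 2, 3])  # passes silently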
@@ -367,7 +367,7 @@ class BasePredictor:
         # Save videos and streams
         if self.dataset.mode in {"stream", "video"}:
             fps = self.dataset.fps if self.dataset.mode == "video" else 30
-            frames_path = f'{save_path.split(".", 1)[0]}_frames/'
+            frames_path = f"{save_path.split('.', 1)[0]}_frames/"
             if save_path not in self.vid_writer:  # new video
                 if self.args.save_frames:
                     Path(frames_path).mkdir(parents=True, exist_ok=True)
@@ -196,7 +196,7 @@ class BaseTrainer:
             # Command
             cmd, file = generate_ddp_command(world_size, self)
             try:
-                LOGGER.info(f'{colorstr("DDP:")} debug command {" ".join(cmd)}')
+                LOGGER.info(f"{colorstr('DDP:')} debug command {' '.join(cmd)}")
                 subprocess.run(cmd, check=True)
             except Exception as e:
                 raise e
@@ -329,10 +329,10 @@ class BaseTrainer:
         self.train_time_start = time.time()
         self.run_callbacks("on_train_start")
         LOGGER.info(
-            f'Image sizes {self.args.imgsz} train, {self.args.imgsz} val\n'
-            f'Using {self.train_loader.num_workers * (world_size or 1)} dataloader workers\n'
+            f"Image sizes {self.args.imgsz} train, {self.args.imgsz} val\n"
+            f"Using {self.train_loader.num_workers * (world_size or 1)} dataloader workers\n"
             f"Logging results to {colorstr('bold', self.save_dir)}\n"
-            f'Starting training for ' + (f"{self.args.time} hours..." if self.args.time else f"{self.epochs} epochs...")
+            f"Starting training for " + (f"{self.args.time} hours..." if self.args.time else f"{self.epochs} epochs...")
         )
         if self.args.close_mosaic:
             base_idx = (self.epochs - self.args.close_mosaic) * nb
@@ -814,6 +814,6 @@ class BaseTrainer:
         optimizer.add_param_group({"params": g[1], "weight_decay": 0.0})  # add g1 (BatchNorm2d weights)
         LOGGER.info(
             f"{colorstr('optimizer:')} {type(optimizer).__name__}(lr={lr}, momentum={momentum}) with parameter groups "
-            f'{len(g[1])} weight(decay=0.0), {len(g[0])} weight(decay={decay}), {len(g[2])} bias(decay=0.0)'
+            f"{len(g[1])} weight(decay=0.0), {len(g[0])} weight(decay={decay}), {len(g[2])} bias(decay=0.0)"
         )
         return optimizer
@@ -224,12 +224,12 @@ class Tuner:
 
             # Save and print tune results
             header = (
-                f'{self.prefix}{i + 1}/{iterations} iterations complete ✅ ({time.time() - t0:.2f}s)\n'
-                f'{self.prefix}Results saved to {colorstr("bold", self.tune_dir)}\n'
-                f'{self.prefix}Best fitness={fitness[best_idx]} observed at iteration {best_idx + 1}\n'
-                f'{self.prefix}Best fitness metrics are {best_metrics}\n'
-                f'{self.prefix}Best fitness model is {best_save_dir}\n'
-                f'{self.prefix}Best fitness hyperparameters are printed below.\n'
+                f"{self.prefix}{i + 1}/{iterations} iterations complete ✅ ({time.time() - t0:.2f}s)\n"
+                f"{self.prefix}Results saved to {colorstr('bold', self.tune_dir)}\n"
+                f"{self.prefix}Best fitness={fitness[best_idx]} observed at iteration {best_idx + 1}\n"
+                f"{self.prefix}Best fitness metrics are {best_metrics}\n"
+                f"{self.prefix}Best fitness model is {best_save_dir}\n"
+                f"{self.prefix}Best fitness hyperparameters are printed below.\n"
             )
             LOGGER.info("\n" + header)
             data = {k: float(x[best_idx, i + 1]) for i, k in enumerate(self.space.keys())}