Ruff format docstring Python code (#15792)
Signed-off-by: UltralyticsAssistant <web@ultralytics.com>
Co-authored-by: UltralyticsAssistant <web@ultralytics.com>
parent c1882a4327
commit d27664216b

63 changed files with 370 additions and 374 deletions
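The changes below are consistent with enabling Ruff's docstring code formatting, which runs the formatter over doctest examples embedded in docstrings and normalizes their string quotes to double quotes. A minimal configuration sketch, assuming a pyproject.toml-based setup (the exact settings used for this change are not shown on this page):

```toml
# Hypothetical sketch; the repository's actual Ruff configuration may differ.
[tool.ruff.format]
docstring-code-format = true  # also format code examples (doctests, fenced blocks) inside docstrings
```

With this option enabled, running `ruff format .` rewrites `>>>` examples the same way it formats regular source files, which is why single-quoted strings become double-quoted throughout the hunks below.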
@@ -72,11 +72,11 @@ class Model(nn.Module):

     Examples:
         >>> from ultralytics import YOLO
-        >>> model = YOLO('yolov8n.pt')
-        >>> results = model.predict('image.jpg')
-        >>> model.train(data='coco128.yaml', epochs=3)
+        >>> model = YOLO("yolov8n.pt")
+        >>> results = model.predict("image.jpg")
+        >>> model.train(data="coco128.yaml", epochs=3)
         >>> metrics = model.val()
-        >>> model.export(format='onnx')
+        >>> model.export(format="onnx")
     """

     def __init__(
@@ -166,8 +166,8 @@ class Model(nn.Module):
         Results object.

     Examples:
-        >>> model = YOLO('yolov8n.pt')
-        >>> results = model('https://ultralytics.com/images/bus.jpg')
+        >>> model = YOLO("yolov8n.pt")
+        >>> results = model("https://ultralytics.com/images/bus.jpg")
         >>> for r in results:
         ... print(f"Detected {len(r)} objects in image")
     """
@@ -188,9 +188,9 @@ class Model(nn.Module):
         (bool): True if the model string is a valid Triton Server URL, False otherwise.

     Examples:
-        >>> Model.is_triton_model('http://localhost:8000/v2/models/yolov8n')
+        >>> Model.is_triton_model("http://localhost:8000/v2/models/yolov8n")
         True
-        >>> Model.is_triton_model('yolov8n.pt')
+        >>> Model.is_triton_model("yolov8n.pt")
         False
     """
     from urllib.parse import urlsplit
@@ -253,7 +253,7 @@ class Model(nn.Module):

     Examples:
         >>> model = Model()
-        >>> model._new('yolov8n.yaml', task='detect', verbose=True)
+        >>> model._new("yolov8n.yaml", task="detect", verbose=True)
     """
     cfg_dict = yaml_model_load(cfg)
     self.cfg = cfg
@@ -284,8 +284,8 @@ class Model(nn.Module):

     Examples:
         >>> model = Model()
-        >>> model._load('yolov8n.pt')
-        >>> model._load('path/to/weights.pth', task='detect')
+        >>> model._load("yolov8n.pt")
+        >>> model._load("path/to/weights.pth", task="detect")
     """
     if weights.lower().startswith(("https://", "http://", "rtsp://", "rtmp://", "tcp://")):
         weights = checks.check_file(weights, download_dir=SETTINGS["weights_dir"]) # download and return local file
@@ -348,7 +348,7 @@ class Model(nn.Module):
         AssertionError: If the model is not a PyTorch model.

     Examples:
-        >>> model = Model('yolov8n.pt')
+        >>> model = Model("yolov8n.pt")
         >>> model.reset_weights()
     """
     self._check_is_pytorch_model()
@@ -377,8 +377,8 @@ class Model(nn.Module):

     Examples:
         >>> model = Model()
-        >>> model.load('yolov8n.pt')
-        >>> model.load(Path('path/to/weights.pt'))
+        >>> model.load("yolov8n.pt")
+        >>> model.load(Path("path/to/weights.pt"))
     """
     self._check_is_pytorch_model()
     if isinstance(weights, (str, Path)):
@@ -402,8 +402,8 @@ class Model(nn.Module):
         AssertionError: If the model is not a PyTorch model.

     Examples:
-        >>> model = Model('yolov8n.pt')
-        >>> model.save('my_model.pt')
+        >>> model = Model("yolov8n.pt")
+        >>> model.save("my_model.pt")
     """
     self._check_is_pytorch_model()
     from copy import deepcopy
@@ -439,7 +439,7 @@ class Model(nn.Module):
         TypeError: If the model is not a PyTorch model.

     Examples:
-        >>> model = Model('yolov8n.pt')
+        >>> model = Model("yolov8n.pt")
         >>> model.info() # Prints model summary
         >>> info_list = model.info(detailed=True, verbose=False) # Returns detailed info as a list
     """
@@ -494,8 +494,8 @@ class Model(nn.Module):
         AssertionError: If the model is not a PyTorch model.

     Examples:
-        >>> model = YOLO('yolov8n.pt')
-        >>> image = 'https://ultralytics.com/images/bus.jpg'
+        >>> model = YOLO("yolov8n.pt")
+        >>> image = "https://ultralytics.com/images/bus.jpg"
         >>> embeddings = model.embed(image)
         >>> print(embeddings[0].shape)
     """
@@ -531,8 +531,8 @@ class Model(nn.Module):
         Results object.

     Examples:
-        >>> model = YOLO('yolov8n.pt')
-        >>> results = model.predict(source='path/to/image.jpg', conf=0.25)
+        >>> model = YOLO("yolov8n.pt")
+        >>> results = model.predict(source="path/to/image.jpg", conf=0.25)
         >>> for r in results:
         ... print(r.boxes.data) # print detection bounding boxes

@@ -592,8 +592,8 @@ class Model(nn.Module):
         AttributeError: If the predictor does not have registered trackers.

     Examples:
-        >>> model = YOLO('yolov8n.pt')
-        >>> results = model.track(source='path/to/video.mp4', show=True)
+        >>> model = YOLO("yolov8n.pt")
+        >>> results = model.track(source="path/to/video.mp4", show=True)
         >>> for r in results:
         ... print(r.boxes.id) # print tracking IDs

@@ -635,8 +635,8 @@ class Model(nn.Module):
         AssertionError: If the model is not a PyTorch model.

     Examples:
-        >>> model = YOLO('yolov8n.pt')
-        >>> results = model.val(data='coco128.yaml', imgsz=640)
+        >>> model = YOLO("yolov8n.pt")
+        >>> results = model.val(data="coco128.yaml", imgsz=640)
         >>> print(results.box.map) # Print mAP50-95
     """
     custom = {"rect": True} # method defaults
@@ -677,8 +677,8 @@ class Model(nn.Module):
         AssertionError: If the model is not a PyTorch model.

     Examples:
-        >>> model = YOLO('yolov8n.pt')
-        >>> results = model.benchmark(data='coco8.yaml', imgsz=640, half=True)
+        >>> model = YOLO("yolov8n.pt")
+        >>> results = model.benchmark(data="coco8.yaml", imgsz=640, half=True)
         >>> print(results)
     """
     self._check_is_pytorch_model()
@@ -727,8 +727,8 @@ class Model(nn.Module):
         RuntimeError: If the export process fails due to errors.

     Examples:
-        >>> model = YOLO('yolov8n.pt')
-        >>> model.export(format='onnx', dynamic=True, simplify=True)
+        >>> model = YOLO("yolov8n.pt")
+        >>> model.export(format="onnx", dynamic=True, simplify=True)
         'path/to/exported/model.onnx'
     """
     self._check_is_pytorch_model()
@@ -782,8 +782,8 @@ class Model(nn.Module):
         ModuleNotFoundError: If the HUB SDK is not installed.

     Examples:
-        >>> model = YOLO('yolov8n.pt')
-        >>> results = model.train(data='coco128.yaml', epochs=3)
+        >>> model = YOLO("yolov8n.pt")
+        >>> results = model.train(data="coco128.yaml", epochs=3)
     """
     self._check_is_pytorch_model()
     if hasattr(self.session, "model") and self.session.model.id: # Ultralytics HUB session with loaded model
@@ -847,7 +847,7 @@ class Model(nn.Module):
         AssertionError: If the model is not a PyTorch model.

     Examples:
-        >>> model = YOLO('yolov8n.pt')
+        >>> model = YOLO("yolov8n.pt")
         >>> results = model.tune(use_ray=True, iterations=20)
         >>> print(results)
     """
@@ -907,7 +907,7 @@ class Model(nn.Module):
         AttributeError: If the model or predictor does not have a 'names' attribute.

     Examples:
-        >>> model = YOLO('yolov8n.pt')
+        >>> model = YOLO("yolov8n.pt")
         >>> print(model.names)
         {0: 'person', 1: 'bicycle', 2: 'car', ...}
     """
@@ -957,7 +957,7 @@ class Model(nn.Module):
         (object | None): The transform object of the model if available, otherwise None.

     Examples:
-        >>> model = YOLO('yolov8n.pt')
+        >>> model = YOLO("yolov8n.pt")
         >>> transforms = model.transforms
         >>> if transforms:
         ... print(f"Model transforms: {transforms}")
@@ -986,9 +986,9 @@ class Model(nn.Module):
     Examples:
         >>> def on_train_start(trainer):
         ... print("Training is starting!")
-        >>> model = YOLO('yolov8n.pt')
+        >>> model = YOLO("yolov8n.pt")
         >>> model.add_callback("on_train_start", on_train_start)
-        >>> model.train(data='coco128.yaml', epochs=1)
+        >>> model.train(data="coco128.yaml", epochs=1)
     """
     self.callbacks[event].append(func)

@@ -1005,9 +1005,9 @@ class Model(nn.Module):
         recognized by the Ultralytics callback system.

     Examples:
-        >>> model = YOLO('yolov8n.pt')
-        >>> model.add_callback('on_train_start', lambda: print('Training started'))
-        >>> model.clear_callback('on_train_start')
+        >>> model = YOLO("yolov8n.pt")
+        >>> model.add_callback("on_train_start", lambda: print("Training started"))
+        >>> model.clear_callback("on_train_start")
         >>> # All callbacks for 'on_train_start' are now removed

     Notes:
@@ -1035,8 +1035,8 @@ class Model(nn.Module):
         modifications, ensuring consistent behavior across different runs or experiments.

     Examples:
-        >>> model = YOLO('yolov8n.pt')
-        >>> model.add_callback('on_train_start', custom_function)
+        >>> model = YOLO("yolov8n.pt")
+        >>> model.add_callback("on_train_start", custom_function)
         >>> model.reset_callbacks()
         # All callbacks are now reset to their default functions
     """
@@ -1059,7 +1059,7 @@ class Model(nn.Module):
         (dict): A new dictionary containing only the specified include keys from the input arguments.

     Examples:
-        >>> original_args = {'imgsz': 640, 'data': 'coco.yaml', 'task': 'detect', 'batch': 16, 'epochs': 100}
+        >>> original_args = {"imgsz": 640, "data": "coco.yaml", "task": "detect", "batch": 16, "epochs": 100}
         >>> reset_args = Model._reset_ckpt_args(original_args)
         >>> print(reset_args)
         {'imgsz': 640, 'data': 'coco.yaml', 'task': 'detect'}
@@ -1090,9 +1090,9 @@ class Model(nn.Module):
         NotImplementedError: If the specified key is not supported for the current task.

     Examples:
-        >>> model = Model(task='detect')
-        >>> predictor = model._smart_load('predictor')
-        >>> trainer = model._smart_load('trainer')
+        >>> model = Model(task="detect")
+        >>> predictor = model._smart_load("predictor")
+        >>> trainer = model._smart_load("trainer")

     Notes:
         - This method is typically used internally by other methods of the Model class.
@@ -1128,8 +1128,8 @@ class Model(nn.Module):
     Examples:
         >>> model = Model()
         >>> task_map = model.task_map
-        >>> detect_class_map = task_map['detect']
-        >>> segment_class_map = task_map['segment']
+        >>> detect_class_map = task_map["detect"]
+        >>> segment_class_map = task_map["segment"]

     Note:
         The actual implementation of this method may vary depending on the specific tasks and
@@ -143,7 +143,7 @@ class BaseTensor(SimpleClass):

     Examples:
         >>> base_tensor = BaseTensor(torch.randn(3, 4), orig_shape=(480, 640))
-        >>> cuda_tensor = base_tensor.to('cuda')
+        >>> cuda_tensor = base_tensor.to("cuda")
         >>> float16_tensor = base_tensor.to(dtype=torch.float16)
     """
     return self.__class__(torch.as_tensor(self.data).to(*args, **kwargs), self.orig_shape)
@@ -223,7 +223,7 @@ class Results(SimpleClass):
         >>> for result in results:
         ... print(result.boxes) # Print detection boxes
         ... result.show() # Display the annotated image
-        ... result.save(filename='result.jpg') # Save annotated image
+        ... result.save(filename="result.jpg") # Save annotated image
     """

     def __init__(
@@ -280,7 +280,7 @@ class Results(SimpleClass):
         (Results): A new Results object containing the specified subset of inference results.

     Examples:
-        >>> results = model('path/to/image.jpg') # Perform inference
+        >>> results = model("path/to/image.jpg") # Perform inference
         >>> single_result = results[0] # Get the first result
         >>> subset_results = results[1:4] # Get a slice of results
     """
@@ -319,7 +319,7 @@ class Results(SimpleClass):
         obb (torch.Tensor | None): A tensor of shape (N, 5) containing oriented bounding box coordinates.

     Examples:
-        >>> results = model('image.jpg')
+        >>> results = model("image.jpg")
         >>> new_boxes = torch.tensor([[100, 100, 200, 200, 0.9, 0]])
         >>> results[0].update(boxes=new_boxes)
     """
@@ -370,7 +370,7 @@ class Results(SimpleClass):
         (Results): A new Results object with all tensor attributes on CPU memory.

     Examples:
-        >>> results = model('path/to/image.jpg') # Perform inference
+        >>> results = model("path/to/image.jpg") # Perform inference
         >>> cpu_result = results[0].cpu() # Move the first result to CPU
         >>> print(cpu_result.boxes.device) # Output: cpu
     """
@@ -384,7 +384,7 @@ class Results(SimpleClass):
         (Results): A new Results object with all tensors converted to numpy arrays.

     Examples:
-        >>> results = model('path/to/image.jpg')
+        >>> results = model("path/to/image.jpg")
         >>> numpy_result = results[0].numpy()
         >>> type(numpy_result.boxes.data)
         <class 'numpy.ndarray'>
@@ -488,7 +488,7 @@ class Results(SimpleClass):
         (np.ndarray): Annotated image as a numpy array.

     Examples:
-        >>> results = model('image.jpg')
+        >>> results = model("image.jpg")
         >>> for result in results:
         ... im = result.plot()
         ... im.show()
@@ -578,7 +578,7 @@ class Results(SimpleClass):
         **kwargs (Any): Arbitrary keyword arguments to be passed to the `plot()` method.

     Examples:
-        >>> results = model('path/to/image.jpg')
+        >>> results = model("path/to/image.jpg")
         >>> results[0].show() # Display the first result
         >>> for result in results:
         ... result.show() # Display all results
@@ -599,12 +599,12 @@ class Results(SimpleClass):
         **kwargs (Any): Arbitrary keyword arguments to be passed to the `plot` method.

     Examples:
-        >>> results = model('path/to/image.jpg')
+        >>> results = model("path/to/image.jpg")
         >>> for result in results:
-        ... result.save('annotated_image.jpg')
+        ... result.save("annotated_image.jpg")
         >>> # Or with custom plot arguments
         >>> for result in results:
-        ... result.save('annotated_image.jpg', conf=False, line_width=2)
+        ... result.save("annotated_image.jpg", conf=False, line_width=2)
     """
     if not filename:
         filename = f"results_{Path(self.path).name}"
@@ -623,7 +623,7 @@ class Results(SimpleClass):
         number of detections per class. For classification tasks, it includes the top 5 class probabilities.

     Examples:
-        >>> results = model('path/to/image.jpg')
+        >>> results = model("path/to/image.jpg")
         >>> for result in results:
         ... print(result.verbose())
         2 persons, 1 car, 3 traffic lights,
@@ -660,7 +660,7 @@ class Results(SimpleClass):

     Examples:
         >>> from ultralytics import YOLO
-        >>> model = YOLO('yolov8n.pt')
+        >>> model = YOLO("yolov8n.pt")
         >>> results = model("path/to/image.jpg")
         >>> for result in results:
         ... result.save_txt("output.txt")
@@ -757,7 +757,7 @@ class Results(SimpleClass):
         task type (classification or detection) and available information (boxes, masks, keypoints).

     Examples:
-        >>> results = model('image.jpg')
+        >>> results = model("image.jpg")
         >>> summary = results[0].summary()
         >>> print(summary)
     """
@@ -919,7 +919,7 @@ class Boxes(BaseTensor):
         coordinates in [x1, y1, x2, y2] format, where n is the number of boxes.

     Examples:
-        >>> results = model('image.jpg')
+        >>> results = model("image.jpg")
         >>> boxes = results[0].boxes
         >>> xyxy = boxes.xyxy
         >>> print(xyxy)
@@ -953,7 +953,7 @@ class Boxes(BaseTensor):
         The shape is (N,), where N is the number of boxes.

     Examples:
-        >>> results = model('image.jpg')
+        >>> results = model("image.jpg")
         >>> boxes = results[0].boxes
         >>> class_ids = boxes.cls
         >>> print(class_ids) # tensor([0., 2., 1.])
@@ -970,7 +970,7 @@ class Boxes(BaseTensor):
         otherwise None. Shape is (N,) where N is the number of boxes.

     Examples:
-        >>> results = model.track('path/to/video.mp4')
+        >>> results = model.track("path/to/video.mp4")
         >>> for result in results:
         ... boxes = result.boxes
         ... if boxes.is_track:
@@ -1116,7 +1116,7 @@ class Masks(BaseTensor):
         mask contour.

     Examples:
-        >>> results = model('image.jpg')
+        >>> results = model("image.jpg")
         >>> masks = results[0].masks
         >>> normalized_coords = masks.xyn
         >>> print(normalized_coords[0]) # Normalized coordinates of the first mask
@@ -1141,7 +1141,7 @@ class Masks(BaseTensor):
         number of points in the segment.

     Examples:
-        >>> results = model('image.jpg')
+        >>> results = model("image.jpg")
         >>> masks = results[0].masks
         >>> xy_coords = masks.xy
         >>> print(len(xy_coords)) # Number of masks
@@ -1223,7 +1223,7 @@ class Keypoints(BaseTensor):
         the number of detections and K is the number of keypoints per detection.

     Examples:
-        >>> results = model('image.jpg')
+        >>> results = model("image.jpg")
         >>> keypoints = results[0].keypoints
         >>> xy = keypoints.xy
         >>> print(xy.shape) # (N, K, 2)
@@ -1388,7 +1388,7 @@ class Probs(BaseTensor):
         (torch.Tensor | numpy.ndarray): A tensor containing the confidence score of the top 1 class.

     Examples:
-        >>> results = model('image.jpg') # classify an image
+        >>> results = model("image.jpg") # classify an image
         >>> probs = results[0].probs # get classification probabilities
         >>> top1_confidence = probs.top1conf # get confidence of top 1 class
         >>> print(f"Top 1 class confidence: {top1_confidence.item():.4f}")
@@ -1410,7 +1410,7 @@ class Probs(BaseTensor):
         top 5 predicted classes, sorted in descending order of probability.

     Examples:
-        >>> results = model('image.jpg')
+        >>> results = model("image.jpg")
         >>> probs = results[0].probs
         >>> top5_conf = probs.top5conf
         >>> print(top5_conf) # Prints confidence scores for top 5 classes
@@ -1497,7 +1497,7 @@ class OBB(BaseTensor):
         [x_center, y_center, width, height, rotation]. The shape is (N, 5) where N is the number of boxes.

     Examples:
-        >>> results = model('image.jpg')
+        >>> results = model("image.jpg")
         >>> obb = results[0].obb
         >>> xywhr = obb.xywhr
         >>> print(xywhr.shape)
@@ -1518,7 +1518,7 @@ class OBB(BaseTensor):
         for N detections, where each score is in the range [0, 1].

     Examples:
-        >>> results = model('image.jpg')
+        >>> results = model("image.jpg")
         >>> obb_result = results[0].obb
         >>> confidence_scores = obb_result.conf
         >>> print(confidence_scores)
@@ -1535,7 +1535,7 @@ class OBB(BaseTensor):
         bounding box. The shape is (N,), where N is the number of boxes.

     Examples:
-        >>> results = model('image.jpg')
+        >>> results = model("image.jpg")
         >>> result = results[0]
         >>> obb = result.obb
         >>> class_values = obb.cls
@@ -1553,7 +1553,7 @@ class OBB(BaseTensor):
         oriented bounding box. Returns None if tracking IDs are not available.

     Examples:
-        >>> results = model('image.jpg', tracker=True) # Run inference with tracking
+        >>> results = model("image.jpg", tracker=True) # Run inference with tracking
         >>> for result in results:
         ... if result.obb is not None:
         ... track_ids = result.obb.id
@@ -1620,8 +1620,8 @@ class OBB(BaseTensor):
     Examples:
         >>> import torch
         >>> from ultralytics import YOLO
-        >>> model = YOLO('yolov8n-obb.pt')
-        >>> results = model('path/to/image.jpg')
+        >>> model = YOLO("yolov8n-obb.pt")
+        >>> results = model("path/to/image.jpg")
         >>> for result in results:
         ... obb = result.obb
         ... if obb is not None:
@@ -12,8 +12,8 @@ Example:
     ```python
     from ultralytics import YOLO

-    model = YOLO('yolov8n.pt')
-    model.tune(data='coco8.yaml', epochs=10, iterations=300, optimizer='AdamW', plots=False, save=False, val=False)
+    model = YOLO("yolov8n.pt")
+    model.tune(data="coco8.yaml", epochs=10, iterations=300, optimizer="AdamW", plots=False, save=False, val=False)
     ```
 """

@@ -54,15 +54,15 @@ class Tuner:
     ```python
     from ultralytics import YOLO

-    model = YOLO('yolov8n.pt')
-    model.tune(data='coco8.yaml', epochs=10, iterations=300, optimizer='AdamW', plots=False, save=False, val=False)
+    model = YOLO("yolov8n.pt")
+    model.tune(data="coco8.yaml", epochs=10, iterations=300, optimizer="AdamW", plots=False, save=False, val=False)
     ```

     Tune with custom search space.
     ```python
     from ultralytics import YOLO

-    model = YOLO('yolov8n.pt')
+    model = YOLO("yolov8n.pt")
     model.tune(space={key1: val1, key2: val2}) # custom search space dictionary
     ```
     """