YOLO11 Tasks, Modes, Usage, Macros and Solutions Updates (#16593)
Signed-off-by: UltralyticsAssistant <web@ultralytics.com>
parent 3093fc9ec2
commit 51e93d6111
31 changed files with 541 additions and 541 deletions
@@ -1,7 +1,7 @@
 ---
 comments: true
 description: Explore Ultralytics callbacks for training, validation, exporting, and prediction. Learn how to use and customize them for your ML models.
-keywords: Ultralytics, callbacks, training, validation, export, prediction, ML models, YOLOv8, Python, machine learning
+keywords: Ultralytics, callbacks, training, validation, export, prediction, ML models, YOLO11, Python, machine learning
 ---
 
 ## Callbacks
@@ -16,7 +16,7 @@ Ultralytics framework supports callbacks as entry points in strategic stages of
 allowfullscreen>
 </iframe>
 <br>
-<strong>Watch:</strong> Mastering Ultralytics YOLOv8: Callbacks
+<strong>Watch:</strong> Mastering Ultralytics YOLO: Callbacks
 </p>
 
 ## Examples
@@ -41,7 +41,7 @@ def on_predict_batch_end(predictor):
 
 
 # Create a YOLO model instance
-model = YOLO("yolov8n.pt")
+model = YOLO("yolo11n.pt")
 
 # Add the custom callback to the model
 model.add_callback("on_predict_batch_end", on_predict_batch_end)
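For reference, a minimal end-to-end sketch of the callback registration shown in the hunk above, assuming `ultralytics` is installed and the `yolo11n.pt` weights can be downloaded:

```python
from ultralytics import YOLO


def on_predict_batch_end(predictor):
    """Report how many results the predictor produced for the current batch."""
    print(f"Batch finished with {len(predictor.results)} result(s)")


model = YOLO("yolo11n.pt")
model.add_callback("on_predict_batch_end", on_predict_batch_end)
results = model.predict(source="https://ultralytics.com/images/bus.jpg")
```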
@@ -119,7 +119,7 @@ def on_predict_batch_end(predictor):
     predictor.results = zip(predictor.results, image)
 
 
-model = YOLO("yolov8n.pt")
+model = YOLO("yolo11n.pt")
 model.add_callback("on_predict_batch_end", on_predict_batch_end)
 for result, frame in model.predict():
     pass
@@ -141,7 +141,7 @@ def on_train_epoch_end(trainer):
     trainer.log({"additional_metric": additional_metric})
 
 
-model = YOLO("yolov8n.pt")
+model = YOLO("yolo11n.pt")
 model.add_callback("on_train_epoch_end", on_train_epoch_end)
 model.train(data="coco.yaml", epochs=10)
 ```
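As a runnable variant of the training-callback pattern in that hunk, a minimal sketch; `trainer.epoch` is assumed here to be the trainer's zero-based epoch counter:

```python
from ultralytics import YOLO


def on_train_epoch_end(trainer):
    """Print progress at the end of every training epoch."""
    print(f"Finished epoch {trainer.epoch + 1}")  # trainer.epoch assumed zero-based


model = YOLO("yolo11n.pt")
model.add_callback("on_train_epoch_end", on_train_epoch_end)
model.train(data="coco8.yaml", epochs=3)
```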
@@ -164,7 +164,7 @@ def on_val_end(validator):
     validator.log({"custom_metric": custom_metric})
 
 
-model = YOLO("yolov8n.pt")
+model = YOLO("yolo11n.pt")
 model.add_callback("on_val_end", on_val_end)
 model.val(data="coco.yaml")
 ```
@@ -187,7 +187,7 @@ def on_predict_end(predictor):
     log_prediction(result)
 
 
-model = YOLO("yolov8n.pt")
+model = YOLO("yolo11n.pt")
 model.add_callback("on_predict_end", on_predict_end)
 results = model.predict(source="image.jpg")
 ```
@@ -215,7 +215,7 @@ def on_predict_batch_end(predictor):
     predictor.results = zip(predictor.results, image)
 
 
-model = YOLO("yolov8n.pt")
+model = YOLO("yolo11n.pt")
 model.add_callback("on_predict_batch_end", on_predict_batch_end)
 for result, frame in model.predict():
     pass
@@ -14,7 +14,7 @@ YOLO settings and hyperparameters play a critical role in the model's performanc
 allowfullscreen>
 </iframe>
 <br>
-<strong>Watch:</strong> Mastering Ultralytics YOLOv8: Configuration
+<strong>Watch:</strong> Mastering Ultralytics YOLO: Configuration
 </p>
 
 Ultralytics commands use the following syntax:
@@ -32,8 +32,8 @@ Ultralytics commands use the following syntax:
 ```python
 from ultralytics import YOLO
 
-# Load a YOLOv8 model from a pre-trained weights file
-model = YOLO("yolov8n.pt")
+# Load a YOLO11 model from a pre-trained weights file
+model = YOLO("yolo11n.pt")
 
 # Run MODE mode using the custom arguments ARGS (guess TASK)
 model.MODE(ARGS)
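As a concrete instance of the `model.MODE(ARGS)` pattern above, a short sketch with illustrative argument values:

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")

# MODE = predict, ARGS = source, imgsz and conf overrides
model.predict(source="https://ultralytics.com/images/bus.jpg", imgsz=320, conf=0.25)

# MODE = val, ARGS = dataset override
model.val(data="coco8.yaml", imgsz=640)
```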
@@ -67,12 +67,12 @@ YOLO models can be used for a variety of tasks, including detection, segmentatio
 
 YOLO models can be used in different modes depending on the specific problem you are trying to solve. These modes include:
 
-- **Train**: For training a YOLOv8 model on a custom dataset.
-- **Val**: For validating a YOLOv8 model after it has been trained.
-- **Predict**: For making predictions using a trained YOLOv8 model on new images or videos.
-- **Export**: For exporting a YOLOv8 model to a format that can be used for deployment.
-- **Track**: For tracking objects in real-time using a YOLOv8 model.
-- **Benchmark**: For benchmarking YOLOv8 exports (ONNX, TensorRT, etc.) speed and accuracy.
+- **Train**: For training a YOLO11 model on a custom dataset.
+- **Val**: For validating a YOLO11 model after it has been trained.
+- **Predict**: For making predictions using a trained YOLO11 model on new images or videos.
+- **Export**: For exporting a YOLO11 model to a format that can be used for deployment.
+- **Track**: For tracking objects in real-time using a YOLO11 model.
+- **Benchmark**: For benchmarking YOLO11 exports (ONNX, TensorRT, etc.) speed and accuracy.
 
 | Argument | Default | Description |
 | -------- | ------- | ----------- |
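The six modes listed above map directly onto calls on a loaded model; a compact sketch, with dataset and source values taken as placeholders:

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")

model.train(data="coco8.yaml", epochs=3)  # Train on a small sample dataset
metrics = model.val()  # Val: reuses the training dataset settings
results = model.predict("https://ultralytics.com/images/bus.jpg")  # Predict
path = model.export(format="onnx")  # Export to a deployable format
model.track("https://youtu.be/LNwODJXcvt4", show=False)  # Track objects in a video
```

Benchmarking is handled separately by `ultralytics.utils.benchmarks.benchmark`, as shown in the Python usage hunks further down.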
@@ -1,7 +1,7 @@
 ---
 comments: true
-description: Explore the YOLOv8 command line interface (CLI) for easy execution of detection tasks without needing a Python environment.
-keywords: YOLOv8 CLI, command line interface, YOLOv8 commands, detection tasks, Ultralytics, model training, model prediction
+description: Explore the YOLO11 command line interface (CLI) for easy execution of detection tasks without needing a Python environment.
+keywords: YOLO11 CLI, command line interface, YOLO11 commands, detection tasks, Ultralytics, model training, model prediction
 ---
 
 # Command Line Interface Usage
@@ -16,7 +16,7 @@ The YOLO command line interface (CLI) allows for simple single-line commands wit
 allowfullscreen>
 </iframe>
 <br>
-<strong>Watch:</strong> Mastering Ultralytics YOLOv8: CLI
+<strong>Watch:</strong> Mastering Ultralytics YOLO: CLI
 </p>
 
 !!! example
@@ -37,28 +37,28 @@ The YOLO command line interface (CLI) allows for simple single-line commands wit
 
 Train a detection model for 10 [epochs](https://www.ultralytics.com/glossary/epoch) with an initial learning_rate of 0.01
 ```bash
-yolo train data=coco8.yaml model=yolov8n.pt epochs=10 lr0=0.01
+yolo train data=coco8.yaml model=yolo11n.pt epochs=10 lr0=0.01
 ```
 
 === "Predict"
 
 Predict a YouTube video using a pretrained segmentation model at image size 320:
 ```bash
-yolo predict model=yolov8n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320
+yolo predict model=yolo11n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320
 ```
 
 === "Val"
 
 Val a pretrained detection model at batch-size 1 and image size 640:
 ```bash
-yolo val model=yolov8n.pt data=coco8.yaml batch=1 imgsz=640
+yolo val model=yolo11n.pt data=coco8.yaml batch=1 imgsz=640
 ```
 
 === "Export"
 
-Export a YOLOv8n classification model to ONNX format at image size 224 by 128 (no TASK required)
+Export a YOLO11n classification model to ONNX format at image size 224 by 128 (no TASK required)
 ```bash
-yolo export model=yolov8n-cls.pt format=onnx imgsz=224,128
+yolo export model=yolo11n-cls.pt format=onnx imgsz=224,128
 ```
 
 === "Special"
@@ -75,7 +75,7 @@ The YOLO command line interface (CLI) allows for simple single-line commands wit
 
 Where:
 
-- `TASK` (optional) is one of `[detect, segment, classify, pose, obb]`. If it is not passed explicitly YOLOv8 will try to guess the `TASK` from the model type.
+- `TASK` (optional) is one of `[detect, segment, classify, pose, obb]`. If it is not passed explicitly YOLO11 will try to guess the `TASK` from the model type.
 - `MODE` (required) is one of `[train, val, predict, export, track, benchmark]`
 - `ARGS` (optional) are any number of custom `arg=value` pairs like `imgsz=320` that override defaults. For a full list of available `ARGS` see the [Configuration](cfg.md) page and `defaults.yaml`
 
@@ -83,21 +83,21 @@ Where:
 
 Arguments must be passed as `arg=val` pairs, split by an equals `=` sign and delimited by spaces ` ` between pairs. Do not use `--` argument prefixes or commas `,` between arguments.
 
-- `yolo predict model=yolov8n.pt imgsz=640 conf=0.25` ✅
-- `yolo predict model yolov8n.pt imgsz 640 conf 0.25` ❌
-- `yolo predict --model yolov8n.pt --imgsz 640 --conf 0.25` ❌
+- `yolo predict model=yolo11n.pt imgsz=640 conf=0.25` ✅
+- `yolo predict model yolo11n.pt imgsz 640 conf 0.25` ❌
+- `yolo predict --model yolo11n.pt --imgsz 640 --conf 0.25` ❌
 
 ## Train
 
-Train YOLOv8n on the COCO8 dataset for 100 epochs at image size 640. For a full list of available arguments see the [Configuration](cfg.md) page.
+Train YOLO11n on the COCO8 dataset for 100 epochs at image size 640. For a full list of available arguments see the [Configuration](cfg.md) page.
 
 !!! example
 
 === "Train"
 
-Start training YOLOv8n on COCO8 for 100 epochs at image-size 640.
+Start training YOLO11n on COCO8 for 100 epochs at image-size 640.
 ```bash
-yolo detect train data=coco8.yaml model=yolov8n.pt epochs=100 imgsz=640
+yolo detect train data=coco8.yaml model=yolo11n.pt epochs=100 imgsz=640
 ```
 
 === "Resume"
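For comparison, the `yolo detect train ...` command above has a direct Python equivalent; a brief sketch:

```python
from ultralytics import YOLO

# Equivalent of: yolo detect train data=coco8.yaml model=yolo11n.pt epochs=100 imgsz=640
model = YOLO("yolo11n.pt")
model.train(data="coco8.yaml", epochs=100, imgsz=640)
```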
@@ -109,15 +109,15 @@ Train YOLOv8n on the COCO8 dataset for 100 epochs at image size 640. For a full
 
 ## Val
 
-Validate trained YOLOv8n model [accuracy](https://www.ultralytics.com/glossary/accuracy) on the COCO8 dataset. No arguments are needed as the `model` retains its training `data` and arguments as model attributes.
+Validate trained YOLO11n model [accuracy](https://www.ultralytics.com/glossary/accuracy) on the COCO8 dataset. No arguments are needed as the `model` retains its training `data` and arguments as model attributes.
 
 !!! example
 
 === "Official"
 
-Validate an official YOLOv8n model.
+Validate an official YOLO11n model.
 ```bash
-yolo detect val model=yolov8n.pt
+yolo detect val model=yolo11n.pt
 ```
 
 === "Custom"
@@ -129,15 +129,15 @@ Validate trained YOLOv8n model [accuracy](https://www.ultralytics.com/glossary/a
 
 ## Predict
 
-Use a trained YOLOv8n model to run predictions on images.
+Use a trained YOLO11n model to run predictions on images.
 
 !!! example
 
 === "Official"
 
-Predict with an official YOLOv8n model.
+Predict with an official YOLO11n model.
 ```bash
-yolo detect predict model=yolov8n.pt source='https://ultralytics.com/images/bus.jpg'
+yolo detect predict model=yolo11n.pt source='https://ultralytics.com/images/bus.jpg'
 ```
 
 === "Custom"
@@ -149,15 +149,15 @@ Use a trained YOLOv8n model to run predictions on images.
 
 ## Export
 
-Export a YOLOv8n model to a different format like ONNX, CoreML, etc.
+Export a YOLO11n model to a different format like ONNX, CoreML, etc.
 
 !!! example
 
 === "Official"
 
-Export an official YOLOv8n model to ONNX format.
+Export an official YOLO11n model to ONNX format.
 ```bash
-yolo export model=yolov8n.pt format=onnx
+yolo export model=yolo11n.pt format=onnx
 ```
 
 === "Custom"
@@ -167,7 +167,7 @@ Export a YOLOv8n model to a different format like ONNX, CoreML, etc.
 yolo export model=path/to/best.pt format=onnx
 ```
 
-Available YOLOv8 export formats are in the table below. You can export to any format using the `format` argument, i.e. `format='onnx'` or `format='engine'`.
+Available YOLO11 export formats are in the table below. You can export to any format using the `format` argument, i.e. `format='onnx'` or `format='engine'`.
 
 {% include "macros/export-table.md" %}
 
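Exported weights can usually be loaded straight back through the same `YOLO` class for inference; a sketch assuming the default `yolo11n.onnx` output name:

```python
from ultralytics import YOLO

# Export the PyTorch weights, then run inference with the exported ONNX file
YOLO("yolo11n.pt").export(format="onnx")  # assumed to write yolo11n.onnx next to the weights
onnx_model = YOLO("yolo11n.onnx")
results = onnx_model.predict("https://ultralytics.com/images/bus.jpg")
```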
@@ -183,21 +183,21 @@ Default arguments can be overridden by simply passing them as arguments in the C
 
 Train a detection model for `10 epochs` with `learning_rate` of `0.01`
 ```bash
-yolo detect train data=coco8.yaml model=yolov8n.pt epochs=10 lr0=0.01
+yolo detect train data=coco8.yaml model=yolo11n.pt epochs=10 lr0=0.01
 ```
 
 === "Predict"
 
 Predict a YouTube video using a pretrained segmentation model at image size 320:
 ```bash
-yolo segment predict model=yolov8n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320
+yolo segment predict model=yolo11n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320
 ```
 
 === "Val"
 
 Validate a pretrained detection model at batch-size 1 and image size 640:
 ```bash
-yolo detect val model=yolov8n.pt data=coco8.yaml batch=1 imgsz=640
+yolo detect val model=yolo11n.pt data=coco8.yaml batch=1 imgsz=640
 ```
 
 ## Overriding default config file
@@ -219,19 +219,19 @@ This will create `default_copy.yaml`, which you can then pass as `cfg=default_co
 
 ## FAQ
 
-### How do I use the Ultralytics YOLOv8 command line interface (CLI) for model training?
+### How do I use the Ultralytics YOLO11 command line interface (CLI) for model training?
 
-To train a YOLOv8 model using the CLI, you can execute a simple one-line command in the terminal. For example, to train a detection model for 10 epochs with a [learning rate](https://www.ultralytics.com/glossary/learning-rate) of 0.01, you would run:
+To train a YOLO11 model using the CLI, you can execute a simple one-line command in the terminal. For example, to train a detection model for 10 epochs with a [learning rate](https://www.ultralytics.com/glossary/learning-rate) of 0.01, you would run:
 
 ```bash
-yolo train data=coco8.yaml model=yolov8n.pt epochs=10 lr0=0.01
+yolo train data=coco8.yaml model=yolo11n.pt epochs=10 lr0=0.01
 ```
 
 This command uses the `train` mode with specific arguments. Refer to the full list of available arguments in the [Configuration Guide](cfg.md).
 
-### What tasks can I perform with the Ultralytics YOLOv8 CLI?
+### What tasks can I perform with the Ultralytics YOLO11 CLI?
 
-The Ultralytics YOLOv8 CLI supports a variety of tasks including detection, segmentation, classification, validation, prediction, export, and tracking. For instance:
+The Ultralytics YOLO11 CLI supports a variety of tasks including detection, segmentation, classification, validation, prediction, export, and tracking. For instance:
 
 - **Train a Model**: Run `yolo train data=<data.yaml> model=<model.pt> epochs=<num>`.
 - **Run Predictions**: Use `yolo predict model=<model.pt> source=<data_source> imgsz=<image_size>`.
@@ -239,32 +239,32 @@ The Ultralytics YOLOv8 CLI supports a variety of tasks including detection, segm
 
 Each task can be customized with various arguments. For detailed syntax and examples, see the respective sections like [Train](#train), [Predict](#predict), and [Export](#export).
 
-### How can I validate the accuracy of a trained YOLOv8 model using the CLI?
+### How can I validate the accuracy of a trained YOLO11 model using the CLI?
 
-To validate a YOLOv8 model's accuracy, use the `val` mode. For example, to validate a pretrained detection model with a [batch size](https://www.ultralytics.com/glossary/batch-size) of 1 and image size of 640, run:
+To validate a YOLO11 model's accuracy, use the `val` mode. For example, to validate a pretrained detection model with a [batch size](https://www.ultralytics.com/glossary/batch-size) of 1 and image size of 640, run:
 
 ```bash
-yolo val model=yolov8n.pt data=coco8.yaml batch=1 imgsz=640
+yolo val model=yolo11n.pt data=coco8.yaml batch=1 imgsz=640
 ```
 
 This command evaluates the model on the specified dataset and provides performance metrics. For more details, refer to the [Val](#val) section.
 
-### What formats can I export my YOLOv8 models to using the CLI?
+### What formats can I export my YOLO11 models to using the CLI?
 
-YOLOv8 models can be exported to various formats such as ONNX, CoreML, TensorRT, and more. For instance, to export a model to ONNX format, run:
+YOLO11 models can be exported to various formats such as ONNX, CoreML, TensorRT, and more. For instance, to export a model to ONNX format, run:
 
 ```bash
-yolo export model=yolov8n.pt format=onnx
+yolo export model=yolo11n.pt format=onnx
 ```
 
 For complete details, visit the [Export](../modes/export.md) page.
 
-### How do I customize YOLOv8 CLI commands to override default arguments?
+### How do I customize YOLO11 CLI commands to override default arguments?
 
-To override default arguments in YOLOv8 CLI commands, pass them as `arg=value` pairs. For example, to train a model with custom arguments, use:
+To override default arguments in YOLO11 CLI commands, pass them as `arg=value` pairs. For example, to train a model with custom arguments, use:
 
 ```bash
-yolo train data=coco8.yaml model=yolov8n.pt epochs=10 lr0=0.01
+yolo train data=coco8.yaml model=yolo11n.pt epochs=10 lr0=0.01
 ```
 
 For a full list of available arguments and their descriptions, refer to the [Configuration Guide](cfg.md). Ensure arguments are formatted correctly, as shown in the [Overriding default arguments](#overriding-default-arguments) section.
@@ -1,7 +1,7 @@
 ---
 comments: true
-description: Learn to customize the YOLOv8 Trainer for specific tasks. Step-by-step instructions with Python examples for maximum model performance.
-keywords: Ultralytics, YOLOv8, Trainer Customization, Python, Machine Learning, AI, Model Training, DetectionTrainer, Custom Models
+description: Learn to customize the YOLO11 Trainer for specific tasks. Step-by-step instructions with Python examples for maximum model performance.
+keywords: Ultralytics, YOLO11, Trainer Customization, Python, Machine Learning, AI, Model Training, DetectionTrainer, Custom Models
 ---
 
 Both the Ultralytics YOLO command-line and Python interfaces are simply a high-level abstraction on the base engine executors. Let's take a look at the Trainer engine.
@@ -14,7 +14,7 @@ Both the Ultralytics YOLO command-line and Python interfaces are simply a high-l
 allowfullscreen>
 </iframe>
 <br>
-<strong>Watch:</strong> Mastering Ultralytics YOLOv8: Advanced Customization
+<strong>Watch:</strong> Mastering Ultralytics YOLO: Advanced Customization
 </p>
 
 ## BaseTrainer
@@ -26,7 +26,7 @@ BaseTrainer contains the generic boilerplate training routine. It can be customi
 
 ## DetectionTrainer
 
-Here's how you can use the YOLOv8 `DetectionTrainer` and customize it.
+Here's how you can use the YOLO11 `DetectionTrainer` and customize it.
 
 ```python
 from ultralytics.models.yolo.detect import DetectionTrainer
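A minimal usage sketch of the `DetectionTrainer` introduced above; the `overrides` values are illustrative, and `trainer.best` is taken from the later hunk as the path of the best checkpoint saved during training:

```python
from ultralytics.models.yolo.detect import DetectionTrainer

# Any argument from the Configuration page can be passed via `overrides`
args = dict(model="yolo11n.pt", data="coco8.yaml", epochs=3)
trainer = DetectionTrainer(overrides=args)
trainer.train()
trained_model = trainer.best  # best checkpoint produced during training
```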
@@ -96,9 +96,9 @@ There are other components that can be customized similarly like `Validators` an
 
 ## FAQ
 
-### How do I customize the Ultralytics YOLOv8 DetectionTrainer for specific tasks?
+### How do I customize the Ultralytics YOLO11 DetectionTrainer for specific tasks?
 
-To customize the Ultralytics YOLOv8 `DetectionTrainer` for a specific task, you can override its methods to adapt to your custom model and dataloader. Start by inheriting from `DetectionTrainer` and then redefine methods like `get_model` to implement your custom functionalities. Here's an example:
+To customize the Ultralytics YOLO11 `DetectionTrainer` for a specific task, you can override its methods to adapt to your custom model and dataloader. Start by inheriting from `DetectionTrainer` and then redefine methods like `get_model` to implement your custom functionalities. Here's an example:
 
 ```python
 from ultralytics.models.yolo.detect import DetectionTrainer
@@ -117,18 +117,18 @@ trained_model = trainer.best # get best model
 
 For further customization like changing the `loss function` or adding a `callback`, you can reference our [Callbacks Guide](../usage/callbacks.md).
 
-### What are the key components of the BaseTrainer in Ultralytics YOLOv8?
+### What are the key components of the BaseTrainer in Ultralytics YOLO11?
 
-The `BaseTrainer` in Ultralytics YOLOv8 serves as the foundation for training routines and can be customized for various tasks by overriding its generic methods. Key components include:
+The `BaseTrainer` in Ultralytics YOLO11 serves as the foundation for training routines and can be customized for various tasks by overriding its generic methods. Key components include:
 
 - `get_model(cfg, weights)` to build the model to be trained.
 - `get_dataloader()` to build the dataloader.
 
 For more details on the customization and source code, see the [`BaseTrainer` Reference](../reference/engine/trainer.md).
 
-### How can I add a callback to the Ultralytics YOLOv8 DetectionTrainer?
+### How can I add a callback to the Ultralytics YOLO11 DetectionTrainer?
 
-You can add callbacks to monitor and modify the training process in Ultralytics YOLOv8 `DetectionTrainer`. For instance, here's how you can add a callback to log model weights after every training [epoch](https://www.ultralytics.com/glossary/epoch):
+You can add callbacks to monitor and modify the training process in Ultralytics YOLO11 `DetectionTrainer`. For instance, here's how you can add a callback to log model weights after every training [epoch](https://www.ultralytics.com/glossary/epoch):
 
 ```python
 from ultralytics.models.yolo.detect import DetectionTrainer
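A hedged sketch of the trainer-callback pattern referenced in that FAQ entry; `trainer.last` is assumed here to hold the path of the most recent checkpoint:

```python
from ultralytics.models.yolo.detect import DetectionTrainer


def log_model(trainer):
    """Print the latest checkpoint path at the end of each training epoch."""
    print(trainer.last)  # assumed attribute pointing at the most recent weights file


trainer = DetectionTrainer(overrides=dict(model="yolo11n.pt", data="coco8.yaml", epochs=3))
trainer.add_callback("on_train_epoch_end", log_model)
trainer.train()
```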
@@ -148,19 +148,19 @@ trainer.train()
 
 For further details on callback events and entry points, refer to our [Callbacks Guide](../usage/callbacks.md).
 
-### Why should I use Ultralytics YOLOv8 for model training?
+### Why should I use Ultralytics YOLO11 for model training?
 
-Ultralytics YOLOv8 offers a high-level abstraction on powerful engine executors, making it ideal for rapid development and customization. Key benefits include:
+Ultralytics YOLO11 offers a high-level abstraction on powerful engine executors, making it ideal for rapid development and customization. Key benefits include:
 
 - **Ease of Use**: Both command-line and Python interfaces simplify complex tasks.
 - **Performance**: Optimized for real-time [object detection](https://www.ultralytics.com/glossary/object-detection) and various vision AI applications.
 - **Customization**: Easily extendable for custom models, [loss functions](https://www.ultralytics.com/glossary/loss-function), and dataloaders.
 
-Learn more about YOLOv8's capabilities by visiting [Ultralytics YOLO](https://www.ultralytics.com/yolo).
+Learn more about YOLO11's capabilities by visiting [Ultralytics YOLO](https://www.ultralytics.com/yolo).
 
-### Can I use the Ultralytics YOLOv8 DetectionTrainer for non-standard models?
+### Can I use the Ultralytics YOLO11 DetectionTrainer for non-standard models?
 
-Yes, Ultralytics YOLOv8 `DetectionTrainer` is highly flexible and can be customized for non-standard models. By inheriting from `DetectionTrainer`, you can overload different methods to support your specific model's needs. Here's a simple example:
+Yes, Ultralytics YOLO11 `DetectionTrainer` is highly flexible and can be customized for non-standard models. By inheriting from `DetectionTrainer`, you can overload different methods to support your specific model's needs. Here's a simple example:
 
 ```python
 from ultralytics.models.yolo.detect import DetectionTrainer
@@ -1,12 +1,12 @@
 ---
 comments: true
-description: Learn to integrate YOLOv8 in Python for object detection, segmentation, and classification. Load, train models, and make predictions easily with our comprehensive guide.
-keywords: YOLOv8, Python, object detection, segmentation, classification, machine learning, AI, pretrained models, train models, make predictions
+description: Learn to integrate YOLO11 in Python for object detection, segmentation, and classification. Load, train models, and make predictions easily with our comprehensive guide.
+keywords: YOLO11, Python, object detection, segmentation, classification, machine learning, AI, pretrained models, train models, make predictions
 ---
 
 # Python Usage
 
-Welcome to the YOLOv8 Python Usage documentation! This guide is designed to help you seamlessly integrate YOLOv8 into your Python projects for [object detection](https://www.ultralytics.com/glossary/object-detection), segmentation, and classification. Here, you'll learn how to load and use pretrained models, train new models, and perform predictions on images. The easy-to-use Python interface is a valuable resource for anyone looking to incorporate YOLOv8 into their Python projects, allowing you to quickly implement advanced object detection capabilities. Let's get started!
+Welcome to the YOLO11 Python Usage documentation! This guide is designed to help you seamlessly integrate YOLO11 into your Python projects for [object detection](https://www.ultralytics.com/glossary/object-detection), segmentation, and classification. Here, you'll learn how to load and use pretrained models, train new models, and perform predictions on images. The easy-to-use Python interface is a valuable resource for anyone looking to incorporate YOLO11 into their Python projects, allowing you to quickly implement advanced object detection capabilities. Let's get started!
 
 <p align="center">
 <br>
@@ -16,7 +16,7 @@ Welcome to the YOLOv8 Python Usage documentation! This guide is designed to help
 allowfullscreen>
 </iframe>
 <br>
-<strong>Watch:</strong> Mastering Ultralytics YOLOv8: Python
+<strong>Watch:</strong> Mastering Ultralytics YOLO11: Python
 </p>
 
 For example, users can load a model, train it, evaluate its performance on a validation set, and even export it to ONNX format with just a few lines of code.
@@ -27,10 +27,10 @@ For example, users can load a model, train it, evaluate its performance on a val
 from ultralytics import YOLO
 
 # Create a new YOLO model from scratch
-model = YOLO("yolov8n.yaml")
+model = YOLO("yolo11n.yaml")
 
 # Load a pretrained YOLO model (recommended for training)
-model = YOLO("yolov8n.pt")
+model = YOLO("yolo11n.pt")
 
 # Train the model using the 'coco8.yaml' dataset for 3 epochs
 results = model.train(data="coco8.yaml", epochs=3)
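The same model object can then be validated, used for inference, and exported; a self-contained sketch continuing that workflow:

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")
model.train(data="coco8.yaml", epochs=3)

# Evaluate on the validation split, run inference on an image, then export
metrics = model.val()
results = model("https://ultralytics.com/images/bus.jpg")
model.export(format="onnx")
```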
@@ -47,7 +47,7 @@ For example, users can load a model, train it, evaluate its performance on a val
 
 ## [Train](../modes/train.md)
 
-Train mode is used for training a YOLOv8 model on a custom dataset. In this mode, the model is trained using the specified dataset and hyperparameters. The training process involves optimizing the model's parameters so that it can accurately predict the classes and locations of objects in an image.
+Train mode is used for training a YOLO11 model on a custom dataset. In this mode, the model is trained using the specified dataset and hyperparameters. The training process involves optimizing the model's parameters so that it can accurately predict the classes and locations of objects in an image.
 
 !!! example "Train"
 
@@ -56,7 +56,7 @@ Train mode is used for training a YOLOv8 model on a custom dataset. In this mode
 ```python
 from ultralytics import YOLO
 
-model = YOLO("yolov8n.pt") # pass any model type
+model = YOLO("yolo11n.pt") # pass any model type
 results = model.train(epochs=5)
 ```
 
@@ -65,7 +65,7 @@ Train mode is used for training a YOLOv8 model on a custom dataset. In this mode
 ```python
 from ultralytics import YOLO
 
-model = YOLO("yolov8n.yaml")
+model = YOLO("yolo11n.yaml")
 results = model.train(data="coco8.yaml", epochs=5)
 ```
 
@@ -80,7 +80,7 @@ Train mode is used for training a YOLOv8 model on a custom dataset. In this mode
 
 ## [Val](../modes/val.md)
 
-Val mode is used for validating a YOLOv8 model after it has been trained. In this mode, the model is evaluated on a validation set to measure its [accuracy](https://www.ultralytics.com/glossary/accuracy) and generalization performance. This mode can be used to tune the hyperparameters of the model to improve its performance.
+Val mode is used for validating a YOLO11 model after it has been trained. In this mode, the model is evaluated on a validation set to measure its [accuracy](https://www.ultralytics.com/glossary/accuracy) and generalization performance. This mode can be used to tune the hyperparameters of the model to improve its performance.
 
 !!! example "Val"
 
@@ -89,8 +89,8 @@ Val mode is used for validating a YOLOv8 model after it has been trained. In thi
 ```python
 from ultralytics import YOLO
 
-# Load a YOLOv8 model
-model = YOLO("yolov8n.yaml")
+# Load a YOLO11 model
+model = YOLO("yolo11n.yaml")
 
 # Train the model
 model.train(data="coco8.yaml", epochs=5)
@@ -104,8 +104,8 @@ Val mode is used for validating a YOLOv8 model after it has been trained. In thi
 ```python
 from ultralytics import YOLO
 
-# Load a YOLOv8 model
-model = YOLO("yolov8n.yaml")
+# Load a YOLO11 model
+model = YOLO("yolo11n.yaml")
 
 # Train the model
 model.train(data="coco8.yaml", epochs=5)
@@ -118,7 +118,7 @@ Val mode is used for validating a YOLOv8 model after it has been trained. In thi
 
 ## [Predict](../modes/predict.md)
 
-Predict mode is used for making predictions using a trained YOLOv8 model on new images or videos. In this mode, the model is loaded from a checkpoint file, and the user can provide images or videos to perform inference. The model predicts the classes and locations of objects in the input images or videos.
+Predict mode is used for making predictions using a trained YOLO11 model on new images or videos. In this mode, the model is loaded from a checkpoint file, and the user can provide images or videos to perform inference. The model predicts the classes and locations of objects in the input images or videos.
 
 !!! example "Predict"
 
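A brief predict-mode sketch showing how the returned results are typically inspected, assuming detection results expose `boxes` with `xyxy` and `cls` tensors:

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")
results = model.predict(source="https://ultralytics.com/images/bus.jpg", conf=0.25)

for r in results:
    print(r.boxes.xyxy)  # bounding boxes in xyxy format
    print(r.boxes.cls)  # predicted class indices
```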
@@ -189,27 +189,27 @@ Predict mode is used for making predictions using a trained YOLOv8 model on new
 
 ## [Export](../modes/export.md)
 
-Export mode is used for exporting a YOLOv8 model to a format that can be used for deployment. In this mode, the model is converted to a format that can be used by other software applications or hardware devices. This mode is useful when deploying the model to production environments.
+Export mode is used for exporting a YOLO11 model to a format that can be used for deployment. In this mode, the model is converted to a format that can be used by other software applications or hardware devices. This mode is useful when deploying the model to production environments.
 
 !!! example "Export"
 
 === "Export to ONNX"
 
-Export an official YOLOv8n model to ONNX with dynamic batch-size and image-size.
+Export an official YOLO11n model to ONNX with dynamic batch-size and image-size.
 ```python
 from ultralytics import YOLO
 
-model = YOLO("yolov8n.pt")
+model = YOLO("yolo11n.pt")
 model.export(format="onnx", dynamic=True)
 ```
 
 === "Export to TensorRT"
 
-Export an official YOLOv8n model to TensorRT on `device=0` for acceleration on CUDA devices.
+Export an official YOLO11n model to TensorRT on `device=0` for acceleration on CUDA devices.
 ```python
 from ultralytics import YOLO
 
-model = YOLO("yolov8n.pt")
+model = YOLO("yolo11n.pt")
 model.export(format="onnx", device=0)
 ```
 
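For a TensorRT engine specifically, the `engine` entry from the export-formats table is what would typically be passed; a sketch assuming a CUDA device is available:

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")
# format="engine" targets TensorRT; device=0 selects the first CUDA GPU
model.export(format="engine", device=0)
```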
@@ -217,7 +217,7 @@ Export mode is used for exporting a YOLOv8 model to a format that can be used fo
 
 ## [Track](../modes/track.md)
 
-Track mode is used for tracking objects in real-time using a YOLOv8 model. In this mode, the model is loaded from a checkpoint file, and the user can provide a live video stream to perform real-time object tracking. This mode is useful for applications such as surveillance systems or self-driving cars.
+Track mode is used for tracking objects in real-time using a YOLO11 model. In this mode, the model is loaded from a checkpoint file, and the user can provide a live video stream to perform real-time object tracking. This mode is useful for applications such as surveillance systems or self-driving cars.
 
 !!! example "Track"
 
@@ -227,8 +227,8 @@ Track mode is used for tracking objects in real-time using a YOLOv8 model. In th
 from ultralytics import YOLO
 
 # Load a model
-model = YOLO("yolov8n.pt") # load an official detection model
-model = YOLO("yolov8n-seg.pt") # load an official segmentation model
+model = YOLO("yolo11n.pt") # load an official detection model
+model = YOLO("yolo11n-seg.pt") # load an official segmentation model
 model = YOLO("path/to/best.pt") # load a custom model
 
 # Track with the model
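The tracking call that follows the `# Track with the model` comment above typically looks like this sketch; the YouTube source and `show` flag are illustrative:

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")

# Track objects in a video source and display the annotated stream
results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True)
```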
@@ -240,18 +240,18 @@ Track mode is used for tracking objects in real-time using a YOLOv8 model. In th
 
 ## [Benchmark](../modes/benchmark.md)
 
-Benchmark mode is used to profile the speed and accuracy of various export formats for YOLOv8. The benchmarks provide information on the size of the exported format, its `mAP50-95` metrics (for object detection and segmentation) or `accuracy_top5` metrics (for classification), and the inference time in milliseconds per image across various export formats like ONNX, OpenVINO, TensorRT and others. This information can help users choose the optimal export format for their specific use case based on their requirements for speed and accuracy.
+Benchmark mode is used to profile the speed and accuracy of various export formats for YOLO11. The benchmarks provide information on the size of the exported format, its `mAP50-95` metrics (for object detection and segmentation) or `accuracy_top5` metrics (for classification), and the inference time in milliseconds per image across various export formats like ONNX, OpenVINO, TensorRT and others. This information can help users choose the optimal export format for their specific use case based on their requirements for speed and accuracy.
 
 !!! example "Benchmark"
 
 === "Python"
 
-Benchmark an official YOLOv8n model across all export formats.
+Benchmark an official YOLO11n model across all export formats.
 ```python
 from ultralytics.utils.benchmarks import benchmark
 
 # Benchmark
-benchmark(model="yolov8n.pt", data="coco8.yaml", imgsz=640, half=False, device=0)
+benchmark(model="yolo11n.pt", data="coco8.yaml", imgsz=640, half=False, device=0)
 ```
 
 [Benchmark Examples](../modes/benchmark.md){ .md-button }
@@ -268,7 +268,7 @@ Explorer API can be used to explore datasets with advanced semantic, vector-simi
 from ultralytics import Explorer
 
 # create an Explorer object
-exp = Explorer(data="coco8.yaml", model="yolov8n.pt")
+exp = Explorer(data="coco8.yaml", model="yolo11n.pt")
 exp.create_embeddings_table()
 
 similar = exp.get_similar(img="https://ultralytics.com/images/bus.jpg", limit=10)
@@ -287,7 +287,7 @@ Explorer API can be used to explore datasets with advanced semantic, vector-simi
 from ultralytics import Explorer
 
 # create an Explorer object
-exp = Explorer(data="coco8.yaml", model="yolov8n.pt")
+exp = Explorer(data="coco8.yaml", model="yolo11n.pt")
 exp.create_embeddings_table()
 
 similar = exp.get_similar(idx=1, limit=10)
@@ -333,15 +333,15 @@ You can easily customize Trainers to support custom tasks or explore R&D ideas.
 
 ## FAQ
 
-### How can I integrate YOLOv8 into my Python project for object detection?
+### How can I integrate YOLO11 into my Python project for object detection?
 
-Integrating Ultralytics YOLOv8 into your Python projects is simple. You can load a pre-trained model or train a new model from scratch. Here's how to get started:
+Integrating Ultralytics YOLO11 into your Python projects is simple. You can load a pre-trained model or train a new model from scratch. Here's how to get started:
 
 ```python
 from ultralytics import YOLO
 
 # Load a pretrained YOLO model
-model = YOLO("yolov8n.pt")
+model = YOLO("yolo11n.pt")
 
 # Perform object detection on an image
 results = model("https://ultralytics.com/images/bus.jpg")
@@ -353,9 +353,9 @@ for result in results:
 
 See more detailed examples in our [Predict Mode](../modes/predict.md) section.
 
-### What are the different modes available in YOLOv8?
+### What are the different modes available in YOLO11?
 
-Ultralytics YOLOv8 provides various modes to cater to different [machine learning](https://www.ultralytics.com/glossary/machine-learning-ml) workflows. These include:
+Ultralytics YOLO11 provides various modes to cater to different [machine learning](https://www.ultralytics.com/glossary/machine-learning-ml) workflows. These include:
 
 - **[Train](../modes/train.md)**: Train a model using custom datasets.
 - **[Val](../modes/val.md)**: Validate model performance on a validation set.
@@ -366,15 +366,15 @@ Ultralytics YOLOv8 provides various modes to cater to different [machine learnin
 
 Each mode is designed to provide comprehensive functionalities for different stages of model development and deployment.
 
-### How do I train a custom YOLOv8 model using my dataset?
+### How do I train a custom YOLO11 model using my dataset?
 
-To train a custom YOLOv8 model, you need to specify your dataset and other hyperparameters. Here's a quick example:
+To train a custom YOLO11 model, you need to specify your dataset and other hyperparameters. Here's a quick example:
 
 ```python
 from ultralytics import YOLO
 
 # Load the YOLO model
-model = YOLO("yolov8n.yaml")
+model = YOLO("yolo11n.yaml")
 
 # Train the model with custom dataset
 model.train(data="path/to/your/dataset.yaml", epochs=10)
@@ -382,15 +382,15 @@ model.train(data="path/to/your/dataset.yaml", epochs=10)
 
 For more details on training and hyperlinks to example usage, visit our [Train Mode](../modes/train.md) page.
 
-### How do I export YOLOv8 models for deployment?
+### How do I export YOLO11 models for deployment?
 
-Exporting YOLOv8 models in a format suitable for deployment is straightforward with the `export` function. For example, you can export a model to ONNX format:
+Exporting YOLO11 models in a format suitable for deployment is straightforward with the `export` function. For example, you can export a model to ONNX format:
 
 ```python
 from ultralytics import YOLO
 
 # Load the YOLO model
-model = YOLO("yolov8n.pt")
+model = YOLO("yolo11n.pt")
 
 # Export the model to ONNX format
 model.export(format="onnx")
@@ -398,15 +398,15 @@ model.export(format="onnx")
 
 For various export options, refer to the [Export Mode](../modes/export.md) documentation.
 
-### Can I validate my YOLOv8 model on different datasets?
+### Can I validate my YOLO11 model on different datasets?
 
-Yes, validating YOLOv8 models on different datasets is possible. After training, you can use the validation mode to evaluate the performance:
+Yes, validating YOLO11 models on different datasets is possible. After training, you can use the validation mode to evaluate the performance:
 
 ```python
 from ultralytics import YOLO
 
-# Load a YOLOv8 model
-model = YOLO("yolov8n.yaml")
+# Load a YOLO11 model
+model = YOLO("yolo11n.yaml")
 
 # Train the model
 model.train(data="coco8.yaml", epochs=5)
@@ -38,7 +38,7 @@ from ultralytics.data.annotator import auto_annotate
 
 auto_annotate( # (1)!
     data="path/to/new/data",
-    det_model="yolov8n.pt",
+    det_model="yolo11n.pt",
     sam_model="mobile_sam.pt",
     device="cuda",
     output_dir="path/to/save_labels",
@@ -93,7 +93,7 @@ from ultralytics.utils.plotting import Annotator
 from ultralytics import YOLO
 import cv2
 
-model = YOLO('yolov8n.pt') # Load pretrain or fine-tune model
+model = YOLO('yolo11n.pt') # Load pretrain or fine-tune model
 
 # Process the image
 source = cv2.imread('path/to/image.jpg')
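As a sketch of how the loaded model and image above are usually combined with `Annotator`, assuming `Annotator.box_label()` for drawing and `.result()` for retrieving the annotated image:

```python
import cv2

from ultralytics import YOLO
from ultralytics.utils.plotting import Annotator

model = YOLO("yolo11n.pt")
source = cv2.imread("path/to/image.jpg")
results = model(source)

annotator = Annotator(source)
for box, cls in zip(results[0].boxes.xyxy, results[0].boxes.cls):
    annotator.box_label(box, model.names[int(cls)])  # draw each detection with its class name

cv2.imwrite("annotated.jpg", annotator.result())
```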
@@ -468,7 +468,7 @@ import cv2
 from ultralytics import YOLO
 from ultralytics.utils.plotting import Annotator
 
-model = YOLO("yolov8s.pt")
+model = YOLO("yolo11s.pt")
 names = model.names
 cap = cv2.VideoCapture("path/to/video/file.mp4")
 
@@ -507,7 +507,7 @@ import cv2
 from ultralytics import YOLO
 from ultralytics.utils.plotting import Annotator
 
-model = YOLO("yolov8s.pt")
+model = YOLO("yolo11s.pt")
 names = model.names
 cap = cv2.VideoCapture("path/to/video/file.mp4")
 
@@ -598,7 +598,7 @@ from ultralytics.data.annotator import auto_annotate
 
 auto_annotate(
     data="path/to/new/data",
-    det_model="yolov8n.pt",
+    det_model="yolo11n.pt",
     sam_model="mobile_sam.pt",
     device="cuda",
     output_dir="path/to/save_labels",