Ultralytics Asset URL Update (#14345)
Signed-off-by: Glenn Jocher <glenn.jocher@ultralytics.com>
Co-authored-by: UltralyticsAssistant <web@ultralytics.com>
parent 470b120a1b · commit 2d332a1cb1
35 changed files with 55 additions and 53 deletions
@@ -34,7 +34,7 @@ RUN rm -rf /usr/lib/python3.11/EXTERNALLY-MANAGED
 # Install pip packages
 # Install tensorstore from .whl because PyPI does not include aarch64 binaries
 RUN python3 -m pip install --upgrade pip wheel
-RUN pip install --no-cache-dir https://github.com/ultralytics/yolov5/releases/download/v1.0/tensorstore-0.1.59-cp311-cp311-linux_aarch64.whl -e ".[export]"
+RUN pip install --no-cache-dir https://github.com/ultralytics/assets/releases/download/v0.0.0/tensorstore-0.1.59-cp311-cp311-linux_aarch64.whl -e ".[export]"

 # Creates a symbolic link to make 'python' point to 'python3'
 RUN ln -sf /usr/bin/python3 /usr/bin/python
@@ -38,9 +38,9 @@ ADD https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8n.pt $A
 # Other versions can be seen in https://elinux.org/Jetson_Zoo and https://forums.developer.nvidia.com/t/pytorch-for-jetson/72048
 ADD https://nvidia.box.com/shared/static/gjqofg7rkg97z3gc8jeyup6t8n9j8xjw.whl onnxruntime_gpu-1.8.0-cp38-cp38-linux_aarch64.whl
 ADD https://forums.developer.nvidia.com/uploads/short-url/hASzFOm9YsJx6VVFrDW1g44CMmv.whl tensorrt-8.2.0.6-cp38-none-linux_aarch64.whl
-ADD https://github.com/ultralytics/yolov5/releases/download/v1.0/torch-1.11.0a0+gitbc2c6ed-cp38-cp38-linux_aarch64.whl \
+ADD https://github.com/ultralytics/assets/releases/download/v0.0.0/torch-1.11.0a0+gitbc2c6ed-cp38-cp38-linux_aarch64.whl \
     torch-1.11.0a0+gitbc2c6ed-cp38-cp38-linux_aarch64.whl
-ADD https://github.com/ultralytics/yolov5/releases/download/v1.0/torchvision-0.12.0a0+9b5a3fe-cp38-cp38-linux_aarch64.whl \
+ADD https://github.com/ultralytics/assets/releases/download/v0.0.0/torchvision-0.12.0a0+9b5a3fe-cp38-cp38-linux_aarch64.whl \
     torchvision-0.12.0a0+9b5a3fe-cp38-cp38-linux_aarch64.whl

 # Install pip packages
@@ -6,7 +6,7 @@ keywords: ImageNet10, ImageNet, Ultralytics, CI tests, sanity checks, training p

 # ImageNet10 Dataset

-The [ImageNet10](https://github.com/ultralytics/yolov5/releases/download/v1.0/imagenet10.zip) dataset is a small-scale subset of the [ImageNet](https://www.image-net.org/) database, developed by [Ultralytics](https://ultralytics.com) and designed for CI tests, sanity checks, and fast testing of training pipelines. This dataset is composed of the first image in the training set and the first image from the validation set of the first 10 classes in ImageNet. Although significantly smaller, it retains the structure and diversity of the original ImageNet dataset.
+The [ImageNet10](https://github.com/ultralytics/assets/releases/download/v0.0.0/imagenet10.zip) dataset is a small-scale subset of the [ImageNet](https://www.image-net.org/) database, developed by [Ultralytics](https://ultralytics.com) and designed for CI tests, sanity checks, and fast testing of training pipelines. This dataset is composed of the first image in the training set and the first image from the validation set of the first 10 classes in ImageNet. Although significantly smaller, it retains the structure and diversity of the original ImageNet dataset.

 ## Key Features

@@ -80,7 +80,7 @@ We would like to acknowledge the ImageNet team, led by Olga Russakovsky, Jia Den

 ### What is the ImageNet10 dataset and how is it different from the full ImageNet dataset?

-The [ImageNet10](https://github.com/ultralytics/yolov5/releases/download/v1.0/imagenet10.zip) dataset is a compact subset of the original [ImageNet](https://www.image-net.org/) database, created by Ultralytics for rapid CI tests, sanity checks, and training pipeline evaluations. ImageNet10 comprises only 20 images, representing the first image in the training and validation sets of the first 10 classes in ImageNet. Despite its small size, it maintains the structure and diversity of the full dataset, making it ideal for quick testing but not for benchmarking models.
+The [ImageNet10](https://github.com/ultralytics/assets/releases/download/v0.0.0/imagenet10.zip) dataset is a compact subset of the original [ImageNet](https://www.image-net.org/) database, created by Ultralytics for rapid CI tests, sanity checks, and training pipeline evaluations. ImageNet10 comprises only 20 images, representing the first image in the training and validation sets of the first 10 classes in ImageNet. Despite its small size, it maintains the structure and diversity of the full dataset, making it ideal for quick testing but not for benchmarking models.

 ### How can I use the ImageNet10 dataset to test my deep learning model?

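For context on the relocated `imagenet10.zip` asset, a minimal sketch of the kind of sanity check this dataset is intended for, assuming the Ultralytics Python API where the `imagenet10` dataset key resolves to the URL above:

```python
from ultralytics import YOLO

# Quick pipeline sanity check: one training epoch on the 20-image ImageNet10 subset
model = YOLO("yolov8n-cls.pt")  # pretrained classification weights
model.train(data="imagenet10", epochs=1, imgsz=224)  # downloads imagenet10.zip automatically if missing
```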
@@ -124,4 +124,4 @@ The ImageNet10 dataset has several key features:

 ### Where can I download the ImageNet10 dataset?

-You can download the ImageNet10 dataset from the [Ultralytics GitHub releases page](https://github.com/ultralytics/yolov5/releases/download/v1.0/imagenet10.zip). For more detailed information about its structure and applications, refer to the [ImageNet10 Dataset](imagenet10.md) page.
+You can download the ImageNet10 dataset from the [Ultralytics GitHub releases page](https://github.com/ultralytics/assets/releases/download/v0.0.0/imagenet10.zip). For more detailed information about its structure and applications, refer to the [ImageNet10 Dataset](imagenet10.md) page.
@@ -13,7 +13,7 @@ By using the TensorRT export format, you can enhance your [Ultralytics YOLOv8](h
 ## TensorRT

 <p align="center">
-  <img width="100%" src="https://github.com/ultralytics/yolov5/releases/download/v1.0/tensorrt-overview.jpg" alt="TensorRT Overview">
+  <img width="100%" src="https://github.com/ultralytics/ultralytics/assets/26833433/7fea48c2-9709-4deb-8d04-eaf95d12a91d" alt="TensorRT Overview">
 </p>

 [TensorRT](https://developer.nvidia.com/tensorrt), developed by NVIDIA, is an advanced software development kit (SDK) designed for high-speed deep learning inference. It's well-suited for real-time applications like object detection.
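For context on the TensorRT page touched here, a minimal sketch of the export flow it documents, assuming the Ultralytics Python API on a machine with an NVIDIA GPU and TensorRT installed:

```python
from ultralytics import YOLO

# Export a pretrained detection model to a TensorRT engine, then reload it for inference
model = YOLO("yolov8n.pt")
model.export(format="engine")  # writes 'yolov8n.engine' (requires NVIDIA GPU + TensorRT)

trt_model = YOLO("yolov8n.engine")
results = trt_model("https://ultralytics.com/images/bus.jpg")  # run inference with the exported engine
```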
@@ -24,7 +24,7 @@ pip install -r requirements.txt # install

 Select a pretrained model to start training from. Here we select [YOLOv5s](https://github.com/ultralytics/yolov5/blob/master/models/yolov5s.yaml), the smallest and fastest model available. See our README [table](https://github.com/ultralytics/yolov5#pretrained-checkpoints) for a full comparison of all models. We will train this model with Multi-GPU on the [COCO](https://github.com/ultralytics/yolov5/blob/master/data/scripts/get_coco.sh) dataset.

-<p align="center"><img width="700" alt="YOLOv5 Models" src="https://github.com/ultralytics/yolov5/releases/download/v1.0/model_comparison.png"></p>
+<p align="center"><img width="700" alt="YOLOv5 Models" src="https://github.com/ultralytics/assets/releases/download/v0.0.0/model_comparison.png"></p>

 ### Single GPU

@@ -28,7 +28,7 @@ We've put together a full guide for users looking to get the best results on the

 Larger models like YOLOv5x and [YOLOv5x6](https://github.com/ultralytics/yolov5/releases/tag/v5.0) will produce better results in nearly all cases, but have more parameters, require more CUDA memory to train, and are slower to run. For **mobile** deployments we recommend YOLOv5s/m, for **cloud** deployments we recommend YOLOv5l/x. See our README [table](https://github.com/ultralytics/yolov5#pretrained-checkpoints) for a full comparison of all models.

-<p align="center"><img width="700" alt="YOLOv5 Models" src="https://github.com/ultralytics/yolov5/releases/download/v1.0/model_comparison.png"></p>
+<p align="center"><img width="700" alt="YOLOv5 Models" src="https://github.com/ultralytics/assets/releases/download/v0.0.0/model_comparison.png"></p>

 - **Start from Pretrained weights.** Recommended for small to medium-sized datasets (i.e. [VOC](https://github.com/ultralytics/yolov5/blob/master/data/VOC.yaml), [VisDrone](https://github.com/ultralytics/yolov5/blob/master/data/VisDrone.yaml), [GlobalWheat](https://github.com/ultralytics/yolov5/blob/master/data/GlobalWheat2020.yaml)). Pass the name of the model to the `--weights` argument. Models download automatically from the [latest YOLOv5 release](https://github.com/ultralytics/yolov5/releases).

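To illustrate the pretrained-weights guidance in the hunk above, a small sketch (not part of this diff) that loads YOLOv5s via PyTorch Hub; the checkpoint is fetched automatically from the latest YOLOv5 release:

```python
import torch

# Load YOLOv5s with pretrained weights; the .pt checkpoint downloads from the latest YOLOv5 release
model = torch.hub.load("ultralytics/yolov5", "yolov5s", pretrained=True)
results = model("https://ultralytics.com/images/zidane.jpg")  # quick smoke test on a sample image
results.print()
```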
@@ -127,7 +127,7 @@ Organize your train and val images and labels according to the example below. YO

 Select a pretrained model to start training from. Here we select [YOLOv5s](https://github.com/ultralytics/yolov5/blob/master/models/yolov5s.yaml), the second-smallest and fastest model available. See our README [table](https://github.com/ultralytics/yolov5#pretrained-checkpoints) for a full comparison of all models.

-<p align="center"><img width="800" alt="YOLOv5 models" src="https://github.com/ultralytics/yolov5/releases/download/v1.0/model_comparison.png"></p>
+<p align="center"><img width="800" alt="YOLOv5 models" src="https://github.com/ultralytics/assets/releases/download/v0.0.0/model_comparison.png"></p>

 ## 4. Train

@@ -185,7 +185,7 @@ Training results are automatically logged with [Tensorboard](https://www.tensorf

 This directory contains train and val statistics, mosaics, labels, predictions and augmented mosaics, as well as metrics and charts including precision-recall (PR) curves and confusion matrices.

-<img alt="Local logging results" src="https://github.com/ultralytics/yolov5/releases/download/v1.0/image-local_logging.jpg" width="1280">
+<img alt="Local logging results" src="https://github.com/ultralytics/assets/releases/download/v0.0.0/image-local_logging.jpg" width="1280">

 Results file `results.csv` is updated after each epoch, and then plotted as `results.png` (below) after training completes. You can also plot any `results.csv` file manually:

@@ -195,7 +195,7 @@ from utils.plots import plot_results
 plot_results("path/to/results.csv") # plot 'results.csv' as 'results.png'
 ```

-<p align="center"><img width="800" alt="results.png" src="https://github.com/ultralytics/yolov5/releases/download/v1.0/results.png"></p>
+<p align="center"><img width="800" alt="results.png" src="https://github.com/ultralytics/assets/releases/download/v0.0.0/results.png"></p>

 ## Next Steps

@@ -90,7 +90,7 @@ def test_predict_img(model_name):
     batch = [
         str(SOURCE),  # filename
         Path(SOURCE),  # Path
-        "https://github.com/ultralytics/yolov5/releases/download/v1.0/zidane.jpg" if ONLINE else SOURCE,  # URI
+        "https://github.com/ultralytics/assets/releases/download/v0.0.0/zidane.jpg" if ONLINE else SOURCE,  # URI
         cv2.imread(str(SOURCE)),  # OpenCV
         Image.open(SOURCE),  # PIL
         np.zeros((320, 640, 3), dtype=np.uint8),  # numpy
@@ -149,7 +149,7 @@ def test_track_stream():

     Note imgsz=160 required for tracking for higher confidence and better matches.
     """
-    video_url = "https://github.com/ultralytics/yolov5/releases/download/v1.0/decelera_portrait_min.mov"
+    video_url = "https://github.com/ultralytics/assets/releases/download/v0.0.0/decelera_portrait_min.mov"
     model = YOLO(MODEL)
     model.track(video_url, imgsz=160, tracker="bytetrack.yaml")
     model.track(video_url, imgsz=160, tracker="botsort.yaml", save_frames=True)  # test frame saving also
@@ -290,7 +290,7 @@ def test_data_converter():
     from ultralytics.data.converter import coco80_to_coco91_class, convert_coco

     file = "instances_val2017.json"
-    download(f"https://github.com/ultralytics/yolov5/releases/download/v1.0/{file}", dir=TMP)
+    download(f"https://github.com/ultralytics/assets/releases/download/v0.0.0/{file}", dir=TMP)
     convert_coco(labels_dir=TMP, save_dir=TMP / "yolo_labels", use_segments=True, use_keypoints=False, cls91to80=True)
     coco80_to_coco91_class()

@@ -33,4 +33,4 @@ names:
   15: container crane

 # Download script/URL (optional)
-download: https://github.com/ultralytics/yolov5/releases/download/v1.0/DOTAv1.5.zip
+download: https://github.com/ultralytics/assets/releases/download/v0.0.0/DOTAv1.5.zip
@@ -32,4 +32,4 @@ names:
   14: swimming pool

 # Download script/URL (optional)
-download: https://github.com/ultralytics/yolov5/releases/download/v1.0/DOTAv1.zip
+download: https://github.com/ultralytics/assets/releases/download/v0.0.0/DOTAv1.zip
@@ -37,7 +37,7 @@ download: |
   # Download
   dir = Path(yaml['path'])  # dataset root dir
   urls = ['https://zenodo.org/record/4298502/files/global-wheat-codalab-official.zip',
-          'https://github.com/ultralytics/yolov5/releases/download/v1.0/GlobalWheat2020_labels.zip']
+          'https://github.com/ultralytics/assets/releases/download/v0.0.0/GlobalWheat2020_labels.zip']
   download(urls, dir=dir)

   # Make Directories
@@ -76,7 +76,7 @@ download: |

   # Download
   dir = Path(yaml['path'])  # dataset root dir
-  url = 'https://github.com/ultralytics/yolov5/releases/download/v1.0/'
+  url = 'https://github.com/ultralytics/assets/releases/download/v0.0.0/'
   urls = [f'{url}VOCtrainval_06-Nov-2007.zip',  # 446MB, 5012 images
           f'{url}VOCtest_06-Nov-2007.zip',  # 438MB, 4953 images
           f'{url}VOCtrainval_11-May-2012.zip']  # 1.95GB, 17126 images
@@ -61,10 +61,10 @@ download: |

   # Download
   dir = Path(yaml['path'])  # dataset root dir
-  urls = ['https://github.com/ultralytics/yolov5/releases/download/v1.0/VisDrone2019-DET-train.zip',
-          'https://github.com/ultralytics/yolov5/releases/download/v1.0/VisDrone2019-DET-val.zip',
-          'https://github.com/ultralytics/yolov5/releases/download/v1.0/VisDrone2019-DET-test-dev.zip',
-          'https://github.com/ultralytics/yolov5/releases/download/v1.0/VisDrone2019-DET-test-challenge.zip']
+  urls = ['https://github.com/ultralytics/assets/releases/download/v0.0.0/VisDrone2019-DET-train.zip',
+          'https://github.com/ultralytics/assets/releases/download/v0.0.0/VisDrone2019-DET-val.zip',
+          'https://github.com/ultralytics/assets/releases/download/v0.0.0/VisDrone2019-DET-test-dev.zip',
+          'https://github.com/ultralytics/assets/releases/download/v0.0.0/VisDrone2019-DET-test-challenge.zip']
   download(urls, dir=dir, curl=True, threads=4)

   # Convert
@@ -21,4 +21,4 @@ names:
   3: zebra

 # Download script/URL (optional)
-download: https://github.com/ultralytics/yolov5/releases/download/v1.0/african-wildlife.zip
+download: https://github.com/ultralytics/assets/releases/download/v0.0.0/african-wildlife.zip
@@ -19,4 +19,4 @@ names:
   1: positive

 # Download script/URL (optional)
-download: https://github.com/ultralytics/yolov5/releases/download/v1.0/brain-tumor.zip
+download: https://github.com/ultralytics/assets/releases/download/v0.0.0/brain-tumor.zip
@@ -40,4 +40,4 @@ names:
   22: wheel

 # Download script/URL (optional)
-download: https://github.com/ultralytics/yolov5/releases/download/v1.0/carparts-seg.zip
+download: https://github.com/ultralytics/assets/releases/download/v0.0.0/carparts-seg.zip
@@ -28,7 +28,7 @@ download: |

   # Download labels
   dir = Path(yaml['path'])  # dataset root dir
-  url = 'https://github.com/ultralytics/yolov5/releases/download/v1.0/'
+  url = 'https://github.com/ultralytics/assets/releases/download/v0.0.0/'
   urls = [url + 'coco2017labels-pose.zip']  # labels
   download(urls, dir=dir.parent)
   # Download data
@@ -104,7 +104,7 @@ download: |
   # Download labels
   segments = True  # segment or box labels
   dir = Path(yaml['path'])  # dataset root dir
-  url = 'https://github.com/ultralytics/yolov5/releases/download/v1.0/'
+  url = 'https://github.com/ultralytics/assets/releases/download/v0.0.0/'
   urls = [url + ('coco2017labels-segments.zip' if segments else 'coco2017labels.zip')]  # labels
   download(urls, dir=dir.parent)
   # Download data
@@ -97,4 +97,4 @@ names:
   79: toothbrush

 # Download script/URL (optional)
-download: https://github.com/ultralytics/yolov5/releases/download/v1.0/coco128-seg.zip
+download: https://github.com/ultralytics/assets/releases/download/v0.0.0/coco128-seg.zip
@@ -97,4 +97,4 @@ names:
   79: toothbrush

 # Download script/URL (optional)
-download: https://github.com/ultralytics/yolov5/releases/download/v1.0/coco128.zip
+download: https://github.com/ultralytics/assets/releases/download/v0.0.0/coco128.zip
@@ -22,4 +22,4 @@ names:
   0: person

 # Download script/URL (optional)
-download: https://github.com/ultralytics/yolov5/releases/download/v1.0/coco8-pose.zip
+download: https://github.com/ultralytics/assets/releases/download/v0.0.0/coco8-pose.zip
@@ -97,4 +97,4 @@ names:
   79: toothbrush

 # Download script/URL (optional)
-download: https://github.com/ultralytics/yolov5/releases/download/v1.0/coco8-seg.zip
+download: https://github.com/ultralytics/assets/releases/download/v0.0.0/coco8-seg.zip
@@ -97,4 +97,4 @@ names:
   79: toothbrush

 # Download script/URL (optional)
-download: https://github.com/ultralytics/yolov5/releases/download/v1.0/coco8.zip
+download: https://github.com/ultralytics/assets/releases/download/v0.0.0/coco8.zip
@@ -18,4 +18,4 @@ names:
   0: crack

 # Download script/URL (optional)
-download: https://github.com/ultralytics/yolov5/releases/download/v1.0/crack-seg.zip
+download: https://github.com/ultralytics/assets/releases/download/v0.0.0/crack-seg.zip
@@ -31,4 +31,4 @@ names:
   14: swimming pool

 # Download script/URL (optional)
-download: https://github.com/ultralytics/yolov5/releases/download/v1.0/dota8.zip
+download: https://github.com/ultralytics/assets/releases/download/v0.0.0/dota8.zip
@@ -1225,7 +1225,7 @@ download: |

   # Download labels
   dir = Path(yaml['path'])  # dataset root dir
-  url = 'https://github.com/ultralytics/yolov5/releases/download/v1.0/'
+  url = 'https://github.com/ultralytics/assets/releases/download/v0.0.0/'
   urls = [url + 'lvis-labels-segments.zip']  # labels
   download(urls, dir=dir.parent)
   # Download data
@@ -18,4 +18,4 @@ names:
   0: package

 # Download script/URL (optional)
-download: https://github.com/ultralytics/yolov5/releases/download/v1.0/package-seg.zip
+download: https://github.com/ultralytics/assets/releases/download/v0.0.0/package-seg.zip
@@ -17,4 +17,4 @@ names:
   0: signature

 # Download script/URL (optional)
-download: https://github.com/ultralytics/yolov5/releases/download/v1.0/signature.zip
+download: https://github.com/ultralytics/assets/releases/download/v0.0.0/signature.zip
@@ -21,4 +21,4 @@ names:
   0: tiger

 # Download script/URL (optional)
-download: https://github.com/ultralytics/yolov5/releases/download/v1.0/tiger-pose.zip
+download: https://github.com/ultralytics/assets/releases/download/v0.0.0/tiger-pose.zip
@@ -28,7 +28,7 @@ fi

 # Download/unzip labels
 d='../datasets' # unzip directory
-url=https://github.com/ultralytics/yolov5/releases/download/v1.0/
+url=https://github.com/ultralytics/assets/releases/download/v0.0.0/
 if [ "$segments" == "true" ]; then
   f='coco2017labels-segments.zip' # 169 MB
 elif [ "$sama" == "true" ]; then
@@ -9,7 +9,7 @@

 # Download/unzip images and labels
 d='../datasets' # unzip directory
-url=https://github.com/ultralytics/yolov5/releases/download/v1.0/
+url=https://github.com/ultralytics/assets/releases/download/v0.0.0/
 f='coco128.zip' # or 'coco128-segments.zip', 68 MB
 echo 'Downloading' $url$f ' ...'
 curl -L $url$f -o $f -# && unzip -q $f -d $d && rm $f &
@@ -379,7 +379,7 @@ def check_cls_dataset(dataset, split=""):
         if str(dataset) == "imagenet":
             subprocess.run(f"bash {ROOT / 'data/scripts/get_imagenet.sh'}", shell=True, check=True)
         else:
-            url = f"https://github.com/ultralytics/yolov5/releases/download/v1.0/{dataset}.zip"
+            url = f"https://github.com/ultralytics/assets/releases/download/v0.0.0/{dataset}.zip"
             download(url, dir=data_dir.parent)
         s = f"Dataset download success ✅ ({time.time() - t:.1f}s), saved to {colorstr('bold', data_dir)}\n"
         LOGGER.info(s)
@@ -195,7 +195,7 @@ class RF100Benchmark:
         (shutil.rmtree("rf-100"), os.mkdir("rf-100")) if os.path.exists("rf-100") else os.mkdir("rf-100")
         os.chdir("rf-100")
         os.mkdir("ultralytics-benchmarks")
-        safe_download("https://github.com/ultralytics/yolov5/releases/download/v1.0/datasets_links.txt")
+        safe_download("https://github.com/ultralytics/assets/releases/download/v0.0.0/datasets_links.txt")

         with open(ds_link_txt, "r") as file:
             for line in file:
@@ -315,7 +315,7 @@ def check_font(font="Arial.ttf"):
         return matches[0]

     # Download to USER_CONFIG_DIR if missing
-    url = f"https://github.com/ultralytics/yolov5/releases/download/v1.0/{name}"
+    url = f"https://github.com/ultralytics/assets/releases/download/v0.0.0/{name}"
     if downloads.is_url(url, check=True):
         downloads.safe_download(url=url, file=file)
         return file
@@ -194,14 +194,12 @@ def unzip_file(file, path=None, exclude=(".DS_Store", "__MACOSX"), exist_ok=Fals
     return path  # return unzip dir


-def check_disk_space(
-    url="https://github.com/ultralytics/yolov5/releases/download/v1.0/coco8.zip", path=Path.cwd(), sf=1.5, hard=True
-):
+def check_disk_space(url="https://ultralytics.com/assets/coco8.zip", path=Path.cwd(), sf=1.5, hard=True):
     """
     Check if there is sufficient disk space to download and store a file.

     Args:
-        url (str, optional): The URL to the file. Defaults to 'https://ultralytics.com/assets/coco8.zip'.
+        url (str, optional): The URL to the file. Defaults to 'https://github.com/ultralytics/assets/releases/download/v0.0.0/coco8.zip'.
         path (str | Path, optional): The path or drive to check the available free space on.
         sf (float, optional): Safety factor, the multiplier for the required free space. Defaults to 2.0.
         hard (bool, optional): Whether to throw an error or not on insufficient disk space. Defaults to True.
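For orientation, a brief usage sketch of the `check_disk_space` utility whose default URL changes here, assuming it is imported from `ultralytics.utils.downloads`:

```python
from ultralytics.utils.downloads import check_disk_space

# Confirm there is roughly sf (1.5x) the file size of free disk space before downloading coco8.zip;
# with hard=True an error is raised when space is insufficient
check_disk_space("https://ultralytics.com/assets/coco8.zip", hard=True)
```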
@@ -322,7 +320,11 @@ def safe_download(
     if "://" not in str(url) and Path(url).is_file():  # URL exists ('://' check required in Windows Python<3.10)
         f = Path(url)  # filename
     elif not f.is_file():  # URL and file do not exist
-        desc = f"Downloading {url if gdrive else clean_url(url)} to '{f}'"
+        uri = (url if gdrive else clean_url(url)).replace(  # cleaned and aliased url
+            "https://github.com/ultralytics/assets/releases/download/v0.0.0/",
+            "https://ultralytics.com/assets/",  # assets alias
+        )
+        desc = f"Downloading {uri} to '{f}'"
         LOGGER.info(f"{desc}...")
         f.parent.mkdir(parents=True, exist_ok=True)  # make directory if missing
         check_disk_space(url, path=f.parent)
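To illustrate the new aliasing behavior added in this hunk, a standalone sketch (not the library code) of the prefix swap used to shorten the logged URI:

```python
# Standalone sketch of the URI aliasing above; prefixes copied from the diff
GITHUB_ASSETS_PREFIX = "https://github.com/ultralytics/assets/releases/download/v0.0.0/"
ALIAS_PREFIX = "https://ultralytics.com/assets/"


def alias_asset_url(url: str) -> str:
    """Return a shorter display URI by swapping the GitHub assets release prefix for the ultralytics.com alias."""
    return url.replace(GITHUB_ASSETS_PREFIX, ALIAS_PREFIX)


print(alias_asset_url("https://github.com/ultralytics/assets/releases/download/v0.0.0/coco8.zip"))
# -> https://ultralytics.com/assets/coco8.zip
```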
@@ -356,10 +358,10 @@ def safe_download(
                     f.unlink()  # remove partial downloads
             except Exception as e:
                 if i == 0 and not is_online():
-                    raise ConnectionError(emojis(f"❌ Download failure for {url}. Environment is not online.")) from e
+                    raise ConnectionError(emojis(f"❌ Download failure for {uri}. Environment is not online.")) from e
                 elif i >= retry:
-                    raise ConnectionError(emojis(f"❌ Download failure for {url}. Retry limit reached.")) from e
-                LOGGER.warning(f"⚠️ Download failure, retrying {i + 1}/{retry} {url}...")
+                    raise ConnectionError(emojis(f"❌ Download failure for {uri}. Retry limit reached.")) from e
+                LOGGER.warning(f"⚠️ Download failure, retrying {i + 1}/{retry} {uri}...")

         if unzip and f.exists() and f.suffix in {"", ".zip", ".tar", ".gz"}:
             from zipfile import is_zipfile