ultralytics 8.2.30 automated tags and release notes (#13164)

Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
Co-authored-by: UltralyticsAssistant <web@ultralytics.com>
Ivor Zhu 2024-06-09 20:33:07 -04:00 committed by GitHub
parent 6367ff4748
commit 59eedcc3fa
29 changed files with 135 additions and 22 deletions

@ -25,6 +25,10 @@ jobs:
uses: actions/checkout@v4
with:
fetch-depth: "0" # pulls all commits (needed for correct last-updated dates in Docs)
- name: Git config
run: |
git config --global user.name "UltralyticsAssistant"
git config --global user.email "web@ultralytics.com"
- name: Set up Python environment
uses: actions/setup-python@v5
with:
@ -40,26 +44,75 @@ jobs:
import os
import ultralytics
from ultralytics.utils.checks import check_latest_pypi_version
v_local = tuple(map(int, ultralytics.__version__.split('.')))
v_pypi = tuple(map(int, check_latest_pypi_version().split('.')))
print(f'Local version is {v_local}')
print(f'PyPI version is {v_pypi}')
d = [a - b for a, b in zip(v_local, v_pypi)] # diff
increment_patch = (d[0] == d[1] == 0) and (0 < d[2] < 3) # publish if patch version increments by 1 or 2
increment_minor = (d[0] == 0) and (d[1] == 1) and v_local[2] == 0 # publish if minor version increments
increment = increment_patch or increment_minor
os.system(f'echo "increment={increment}" >> $GITHUB_OUTPUT')
os.system(f'echo "version={ultralytics.__version__}" >> $GITHUB_OUTPUT')
if increment:
print('Local version is higher than PyPI version. Publishing new version to PyPI ✅.')
id: check_pypi
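For clarity, the gate above reduces to a small standalone check; a minimal sketch with hypothetical local/PyPI version strings:

```python
# Minimal sketch of the increment gate above, using hypothetical version strings
v_local = tuple(map(int, "8.2.30".split(".")))  # version in the local checkout
v_pypi = tuple(map(int, "8.2.29".split(".")))  # latest version published on PyPI

d = [a - b for a, b in zip(v_local, v_pypi)]  # element-wise difference
increment_patch = (d[0] == d[1] == 0) and (0 < d[2] < 3)  # patch bumped by 1 or 2
increment_minor = (d[0] == 0) and (d[1] == 1) and v_local[2] == 0  # minor bumped, patch reset to 0
print(increment_patch or increment_minor)  # True -> tag, release, and PyPI publish steps run
```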
- name: Publish new tag
if: (github.event_name == 'push' || github.event.inputs.pypi == 'true') && steps.check_pypi.outputs.increment == 'True'
run: |
git tag -a "v${{ steps.check_pypi.outputs.version }}" -m "$(git log -1 --pretty=%B)" # i.e. "v0.1.2 commit message"
git push origin "v${{ steps.check_pypi.outputs.version }}"
- name: Publish new release
if: (github.event_name == 'push' || github.event.inputs.pypi == 'true') && steps.check_pypi.outputs.increment == 'True'
env:
OPENAI_AZURE_API_KEY: ${{ secrets.OPENAI_AZURE_API_KEY }}
OPENAI_AZURE_ENDPOINT: ${{ secrets.OPENAI_AZURE_ENDPOINT }}
OPENAI_AZURE_API_VERSION: ${{ secrets.OPENAI_AZURE_API_VERSION }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CURRENT_TAG: ${{ steps.check_pypi.outputs.version }}
PREVIOUS_TAG: ${{ steps.check_pypi.outputs.previous_version }}
shell: python
run: |
import openai
import os
import requests
import json
import subprocess
latest_tag = f"v{os.getenv('CURRENT_TAG')}"
previous_tag = f"v{os.getenv('PREVIOUS_TAG')}"
repo = 'ultralytics/ultralytics'
headers = {"Authorization": f"token {os.getenv('GITHUB_TOKEN')}", "Accept": "application/vnd.github.v3.diff"}
url = f"https://api.github.com/repos/{repo}/compare/{previous_tag}...{latest_tag}"
response = requests.get(url, headers=headers)
diff = response.text if response.status_code == 200 else f"Failed to get diff: {response.content}"
client = openai.AzureOpenAI(api_key=os.getenv('OPENAI_AZURE_API_KEY'), api_version=os.getenv('OPENAI_AZURE_API_VERSION'), azure_endpoint=os.getenv('OPENAI_AZURE_ENDPOINT'))
messages = [
{"role": "system",
"content": "You are an Ultralytics AI assistant skilled in software development and technical communication. Your task is to summarize GitHub releases from Ultralytics in a way that is accurate, concise, and understandable to both expert developers and non-expert users. Focus on highlighting the key changes and their impact in simple, concise terms."},
{"role": "user",
"content": f"Summarize the updates made on the Ultralytics '{latest_tag}' tag, focusing on major changes, their purpose, and potential impact. Keep the summary clear and concise, suitable for a broad audience. Add emojis to enliven the summary. Reply directly with a summary along these example guidelines, though feel free to adjust as appropriate:\n\n"
f"### 🌟 Summary (single-line synopsis)\n"
f"### 📊 Key Changes (bullet points highlighting any major changes)\n"
f"### 🎯 Purpose & Impact (bullet points explaining any benefits and potential impact to users)\n"
f"\n\nHere's the release diff:\n\n{diff[:96000]}",
}
]
summary = client.chat.completions.create(model="gpt-4o-2024-05-13", messages=messages).choices[0].message.content.strip()
commit_message = subprocess.run(['git', 'log', '-1', '--pretty=%B'], check=True, text=True, capture_output=True).stdout.split("\n")[0].strip()
release = {
'tag_name': latest_tag,
'name': f"{latest_tag} - {commit_message}",
'body': summary,
'draft': False,
'prerelease': False
}
response = requests.post(f"https://api.github.com/repos/{repo}/releases", headers=headers, data=json.dumps(release))
if response.status_code == 201:
print(f'Successfully created release {latest_tag}')
else:
print(f'Failed to create release {latest_tag}: {response.content}')
- name: Publish to PyPI
continue-on-error: true
if: (github.event_name == 'push' || github.event.inputs.pypi == 'true') && steps.check_pypi.outputs.increment == 'True'
@ -77,8 +130,6 @@ jobs:
run: |
export JUPYTER_PLATFORM_DIRS=1
python docs/build_docs.py
git config --global user.name "Glenn Jocher"
git config --global user.email "glenn.jocher@ultralytics.com"
git clone https://github.com/ultralytics/docs.git docs-repo
cd docs-repo
git checkout gh-pages || git checkout -b gh-pages

@ -28,6 +28,7 @@ This guide provides a comprehensive overview of three fundamental types of data
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8s.pt")
@ -76,6 +77,7 @@ This guide provides a comprehensive overview of three fundamental types of data
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8s.pt")
@ -136,6 +138,7 @@ This guide provides a comprehensive overview of three fundamental types of data
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8s.pt")
@ -185,6 +188,7 @@ This guide provides a comprehensive overview of three fundamental types of data
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8s.pt")
@ -234,52 +238,53 @@ This guide provides a comprehensive overview of three fundamental types of data
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8s.pt")
cap = cv2.VideoCapture("path/to/video/file.mp4")
assert cap.isOpened(), "Error reading video file"
w, h, fps = (int(cap.get(x)) for x in (cv2.CAP_PROP_FRAME_WIDTH, cv2.CAP_PROP_FRAME_HEIGHT, cv2.CAP_PROP_FPS))
out = cv2.VideoWriter("area_plot.avi", cv2.VideoWriter_fourcc(*"MJPG"), fps, (w, h))
analytics = solutions.Analytics(
type="area",
writer=out,
im0_shape=(w, h),
view_img=True,
)
clswise_count = {}
frame_count = 0
while cap.isOpened():
success, frame = cap.read()
if success:
frame_count += 1
results = model.track(frame, persist=True, verbose=True)
if results[0].boxes.id is not None:
boxes = results[0].boxes.xyxy.cpu()
clss = results[0].boxes.cls.cpu().tolist()
for box, cls in zip(boxes, clss):
if model.names[int(cls)] in clswise_count:
clswise_count[model.names[int(cls)]] += 1
else:
clswise_count[model.names[int(cls)]] = 1
analytics.update_area(frame_count, clswise_count)
clswise_count = {}
if cv2.waitKey(1) & 0xFF == ord("q"):
break
else:
break
cap.release()
out.release()
cv2.destroyAllWindows()
```
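The per-class tally in the loop above can be written more compactly with `collections.Counter`; an equivalent sketch, assuming the same `results` object returned by `model.track`:

```python
from collections import Counter

# Per-frame class-wise counts from the tracked detections
clss = results[0].boxes.cls.cpu().tolist()
clswise_count = dict(Counter(model.names[int(c)] for c in clss))
```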
### Argument `Analytics`

@ -43,6 +43,7 @@ Measuring the gap between two objects is known as distance calculation within a
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8n.pt")

@ -45,6 +45,7 @@ A heatmap generated with [Ultralytics YOLOv8](https://github.com/ultralytics/ult
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8n.pt")
@ -82,6 +83,7 @@ A heatmap generated with [Ultralytics YOLOv8](https://github.com/ultralytics/ult
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8n.pt")
@ -121,6 +123,7 @@ A heatmap generated with [Ultralytics YOLOv8](https://github.com/ultralytics/ult
=== "Polygon Counting"
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8n.pt")
@ -162,6 +165,7 @@ A heatmap generated with [Ultralytics YOLOv8](https://github.com/ultralytics/ult
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8n.pt")
@ -203,6 +207,7 @@ A heatmap generated with [Ultralytics YOLOv8](https://github.com/ultralytics/ult
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8s.pt") # YOLOv8 custom/pretrained model
@ -227,6 +232,7 @@ A heatmap generated with [Ultralytics YOLOv8](https://github.com/ultralytics/ult
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8n.pt")

@ -40,6 +40,7 @@ There are two types of instance segmentation tracking available in the Ultralyti
```python
import cv2
from ultralytics import YOLO
from ultralytics.utils.plotting import Annotator, colors
@ -82,6 +83,7 @@ There are two types of instance segmentation tracking available in the Ultralyti
from collections import defaultdict
import cv2
from ultralytics import YOLO
from ultralytics.utils.plotting import Annotator, colors

@ -21,6 +21,7 @@ After performing the [Segment Task](../tasks/segment.md), it's sometimes desirab
import cv2
import numpy as np
from ultralytics import YOLO
```
@ -272,6 +273,7 @@ from pathlib import Path
import cv2
import numpy as np
from ultralytics import YOLO
m = YOLO("yolov8n-seg.pt") # (4)!

@ -69,6 +69,7 @@ Without further ado, let's dive in!
import pandas as pd
import yaml
from sklearn.model_selection import KFold
from ultralytics import YOLO
```

@ -22,6 +22,7 @@ Object blurring with [Ultralytics YOLOv8](https://github.com/ultralytics/ultraly
```python
import cv2
from ultralytics import YOLO
from ultralytics.utils.plotting import Annotator, colors

@ -52,6 +52,7 @@ Object counting with [Ultralytics YOLOv8](https://github.com/ultralytics/ultraly
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8n.pt")
@ -93,6 +94,7 @@ Object counting with [Ultralytics YOLOv8](https://github.com/ultralytics/ultraly
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8n.pt")
@ -134,6 +136,7 @@ Object counting with [Ultralytics YOLOv8](https://github.com/ultralytics/ultraly
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8n.pt")
@ -175,6 +178,7 @@ Object counting with [Ultralytics YOLOv8](https://github.com/ultralytics/ultraly
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8n.pt")

@ -31,6 +31,7 @@ Object cropping with [Ultralytics YOLOv8](https://github.com/ultralytics/ultraly
import os
import cv2
from ultralytics import YOLO
from ultralytics.utils.plotting import Annotator, colors

@ -60,6 +60,7 @@ Parking management with [Ultralytics YOLOv8](https://github.com/ultralytics/ultr
```python
import cv2
from ultralytics import solutions
# Path to json file, that created with above point selection app

@ -28,6 +28,7 @@ Queue management using [Ultralytics YOLOv8](https://github.com/ultralytics/ultra
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8n.pt")
@ -71,6 +72,7 @@ Queue management using [Ultralytics YOLOv8](https://github.com/ultralytics/ultra
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8n.pt")

@ -286,6 +286,7 @@ There are two methods of using the Raspberry Pi Camera to run inference with YOLOv8 models.
```python
import cv2
from picamera2 import Picamera2
from ultralytics import YOLO
# Initialize the Picamera2
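The Picamera2 snippet above is cut short by the diff; a minimal end-to-end sketch, assuming a default preview configuration and a pretrained `yolov8n.pt` model:

```python
import cv2
from picamera2 import Picamera2

from ultralytics import YOLO

# Initialize the Picamera2 and start a preview-sized RGB stream
picam2 = Picamera2()
picam2.preview_configuration.main.size = (1280, 720)
picam2.preview_configuration.main.format = "RGB888"
picam2.configure("preview")
picam2.start()

model = YOLO("yolov8n.pt")

while True:
    frame = picam2.capture_array()  # numpy array from the camera
    results = model(frame)
    annotated = results[0].plot()
    cv2.imshow("Picamera2 + YOLOv8", annotated)
    if cv2.waitKey(1) & 0xFF == ord("q"):
        break

cv2.destroyAllWindows()
```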

@ -38,6 +38,7 @@ from time import time
import cv2
import numpy as np
import torch
from ultralytics import YOLO
from ultralytics.utils.plotting import Annotator, colors
```

@ -44,6 +44,7 @@ keywords: Ultralytics YOLOv8, speed estimation, object tracking, computer vision
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8n.pt")

@ -48,6 +48,7 @@ The VSCode compatible protocols for viewing images using the integrated terminal
import cv2 as cv
from sixel import SixelWriter
from ultralytics import YOLO
```
@ -111,6 +112,7 @@ import io
import cv2 as cv
from sixel import SixelWriter
from ultralytics import YOLO
# Load a model

@ -23,6 +23,7 @@ keywords: VisionEye, YOLOv8, Ultralytics, object mapping, object tracking, dista
```python
import cv2
from ultralytics import YOLO
from ultralytics.utils.plotting import Annotator, colors
@ -66,6 +67,7 @@ keywords: VisionEye, YOLOv8, Ultralytics, object mapping, object tracking, dista
```python
import cv2
from ultralytics import YOLO
from ultralytics.utils.plotting import Annotator, colors
@ -112,6 +114,7 @@ keywords: VisionEye, YOLOv8, Ultralytics, object mapping, object tracking, dista
import math
import cv2
from ultralytics import YOLO
from ultralytics.utils.plotting import Annotator, colors

@ -40,6 +40,7 @@ Monitoring workouts through pose estimation with [Ultralytics YOLOv8](https://gi
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8n-pose.pt")
@ -72,6 +73,7 @@ Monitoring workouts through pose estimation with [Ultralytics YOLOv8](https://gi
```python
import cv2
from ultralytics import YOLO, solutions
model = YOLO("yolov8n-pose.pt")

@ -50,6 +50,7 @@ When running inference on a 0-channel image, I get an error related to the dimen
```python
import torch
from ultralytics import YOLO
# Load the model

@ -64,6 +64,7 @@ Before diving into the usage instructions, be sure to check out the range of [YO
```python
from clearml import Task
from ultralytics import YOLO
# Step 1: Creating a ClearML Task
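For context, the ClearML integration boils down to creating a task before training; a minimal sketch with placeholder project and task names:

```python
from clearml import Task

from ultralytics import YOLO

# Step 1: Creating a ClearML Task (placeholder project/task names)
task = Task.init(project_name="my-yolov8-project", task_name="train-yolov8n")

# Step 2: Train as usual; ClearML captures the run automatically
model = YOLO("yolov8n.pt")
results = model.train(data="coco8.yaml", epochs=3, imgsz=640)
```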

@ -57,6 +57,7 @@ This section provides the Python code used to create the Gradio interface with t
```python
import gradio as gr
import PIL.Image as Image
from ultralytics import ASSETS, YOLO
model = YOLO("yolov8n.pt")

@ -403,6 +403,7 @@ Expand sections below for information on how these models were exported and test
```py
import cv2
from ultralytics import YOLO
model = YOLO("yolov8n.engine")

@ -64,9 +64,10 @@ Before diving into the usage instructions for YOLOv8 model training with Weights
=== "Python"
```python
import wandb
from ultralytics import YOLO
from wandb.integration.ultralytics import add_wandb_callback
# Step 1: Initialize a Weights & Biases run
wandb.init(project="ultralytics", job_type="training")

@ -178,6 +178,7 @@ Below are code examples for using each source type:
Run inference on an image opened with Python Imaging Library (PIL).
```python
from PIL import Image
from ultralytics import YOLO
# Load a pretrained YOLOv8n model
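The PIL example is truncated by the diff; a complete minimal version, using a placeholder image path:

```python
from PIL import Image

from ultralytics import YOLO

# Load a pretrained YOLOv8n model
model = YOLO("yolov8n.pt")

# Run inference on an image opened with PIL (placeholder path)
im = Image.open("path/to/image.jpg")
results = model(im)
```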
@ -195,6 +196,7 @@ Below are code examples for using each source type:
Run inference on an image read with OpenCV.
```python
import cv2
from ultralytics import YOLO
# Load a pretrained YOLOv8n model
@ -212,6 +214,7 @@ Below are code examples for using each source type:
Run inference on an image represented as a numpy array.
```python
import numpy as np
from ultralytics import YOLO
# Load a pretrained YOLOv8n model
@ -229,6 +232,7 @@ Below are code examples for using each source type:
Run inference on an image represented as a PyTorch tensor.
```python
import torch
from ultralytics import YOLO
# Load a pretrained YOLOv8n model
@ -246,6 +250,7 @@ Below are code examples for using each source type:
Run inference on a collection of images, URLs, videos and directories listed in a CSV file.
```python
import torch
from ultralytics import YOLO
# Load a pretrained YOLOv8n model
@ -673,6 +678,7 @@ The `plot()` method in `Results` objects facilitates visualization of prediction
```python
from PIL import Image
from ultralytics import YOLO
# Load a pretrained YOLOv8n model
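A short sketch of `plot()` in use, assuming a pretrained `yolov8n.pt` model and a placeholder image path:

```python
from PIL import Image

from ultralytics import YOLO

model = YOLO("yolov8n.pt")
results = model("path/to/image.jpg")

# plot() returns a BGR numpy array with boxes and labels drawn on the image
im_bgr = results[0].plot()
Image.fromarray(im_bgr[..., ::-1]).show()  # flip to RGB for PIL display
```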
@ -754,6 +760,7 @@ Here's a Python script using OpenCV (`cv2`) and YOLOv8 to run inference on video
```python
import cv2
from ultralytics import YOLO
# Load the YOLOv8 model
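The video-inference snippet is likewise truncated; a minimal working loop with a placeholder video path:

```python
import cv2

from ultralytics import YOLO

# Load the YOLOv8 model
model = YOLO("yolov8n.pt")

# Open the video file (placeholder path)
cap = cv2.VideoCapture("path/to/video.mp4")

while cap.isOpened():
    success, frame = cap.read()
    if not success:
        break
    results = model(frame)  # run inference on the frame
    annotated_frame = results[0].plot()
    cv2.imshow("YOLOv8 Inference", annotated_frame)
    if cv2.waitKey(1) & 0xFF == ord("q"):
        break

cap.release()
cv2.destroyAllWindows()
```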

@ -159,6 +159,7 @@ Here is a Python script using OpenCV (`cv2`) and YOLOv8 to run object tracking o
```python
import cv2
from ultralytics import YOLO
# Load the YOLOv8 model
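The tracking variant differs mainly in calling `model.track(..., persist=True)` so IDs carry across frames; a minimal sketch with a placeholder video path:

```python
import cv2

from ultralytics import YOLO

# Load the YOLOv8 model
model = YOLO("yolov8n.pt")
cap = cv2.VideoCapture("path/to/video.mp4")

while cap.isOpened():
    success, frame = cap.read()
    if not success:
        break
    # persist=True keeps tracker state (and track IDs) between frames
    results = model.track(frame, persist=True)
    cv2.imshow("YOLOv8 Tracking", results[0].plot())
    if cv2.waitKey(1) & 0xFF == ord("q"):
        break

cap.release()
cv2.destroyAllWindows()
```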
@ -210,6 +211,7 @@ In the following example, we demonstrate how to utilize YOLOv8's tracking capabi
import cv2
import numpy as np
from ultralytics import YOLO
# Load the YOLOv8 model
@ -285,6 +287,7 @@ Finally, after all threads have completed their task, the windows displaying the
import threading
import cv2
from ultralytics import YOLO

@ -127,6 +127,7 @@ Predict mode is used for making predictions using a trained YOLOv8 model on new
```python
import cv2
from PIL import Image
from ultralytics import YOLO
model = YOLO("model.pt")

@ -130,6 +130,7 @@ If you have a dataset that uses the [segmentation dataset format](../datasets/se
```python
import numpy as np
from ultralytics.utils.ops import segments2boxes
segments = np.array(
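The `segments2boxes` call above is cut off by the diff; a self-contained sketch with made-up polygon coordinates:

```python
import numpy as np

from ultralytics.utils.ops import segments2boxes

# One polygon segment as an (n, 2) array of x, y points (hypothetical values)
segments = [np.array([[100, 50], [200, 50], [200, 150], [100, 150]], dtype=np.float32)]

boxes = segments2boxes(segments)  # -> (1, 4) array in xywh format
print(boxes)  # [[150. 100. 100. 100.]]
```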
@ -194,6 +195,7 @@ Convert a single polygon (as list) to a binary mask of the specified image size.
```python
import numpy as np
from ultralytics.data.utils import polygon2mask
imgsz = (1080, 810)
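`polygon2mask` rasterizes a polygon into a binary mask of the given image size; a short sketch with hypothetical coordinates:

```python
import numpy as np

from ultralytics.data.utils import polygon2mask

imgsz = (1080, 810)  # (height, width) of the target mask
# A single polygon given as flattened x, y pairs (hypothetical values)
polygon = np.array([[50, 50, 400, 50, 400, 300, 50, 300]])

mask = polygon2mask(imgsz, polygon)  # uint8 mask with the polygon region filled
print(mask.shape, mask.sum())
```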
@ -215,6 +217,7 @@ To manage bounding box data, the `Bboxes` class will help to convert between box
```python
import numpy as np
from ultralytics.utils.instance import Bboxes
boxes = Bboxes(
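A hedged sketch of the `Bboxes` helper, assuming a `format` keyword plus in-place `convert()` and `areas()` methods as in the simple-utilities docs:

```python
import numpy as np

from ultralytics.utils.instance import Bboxes

# Two boxes in corner (xyxy) format, hypothetical coordinates
boxes = Bboxes(
    bboxes=np.array([[10, 20, 110, 220], [50, 60, 150, 160]], dtype=np.float32),
    format="xyxy",
)

boxes.convert("xywh")  # in-place conversion to center format
print(boxes.bboxes)
print(boxes.areas())  # per-box areas
```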
@ -259,6 +262,7 @@ When scaling an image up or down, corresponding bounding box coordinates can be
```{ .py .annotate }
import cv2 as cv
import numpy as np
from ultralytics.utils.ops import scale_boxes
image = cv.imread("ultralytics/assets/bus.jpg")
@ -307,6 +311,7 @@ Convert bounding box coordinates from (x1, y1, x2, y2) format to (x, y, width, h
```python
import numpy as np
from ultralytics.utils.ops import xyxy2xywh
xyxy_boxes = np.array(
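A self-contained sketch of the corner-to-center conversion shown above, with a single made-up box:

```python
import numpy as np

from ultralytics.utils.ops import xyxy2xywh

# One box in (x1, y1, x2, y2) corner format (hypothetical coordinates)
xyxy_boxes = np.array([[10.0, 20.0, 110.0, 220.0]])

xywh_boxes = xyxy2xywh(xyxy_boxes)
print(xywh_boxes)  # [[ 60. 120. 100. 200.]] -> center x, center y, width, height
```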
@ -359,6 +364,7 @@ Ultralytics includes an Annotator class that can be used to annotate any kind of
```{ .py .annotate }
import cv2 as cv
import numpy as np
from ultralytics.utils.plotting import Annotator, colors
names = { # (1)!
@ -402,6 +408,7 @@ image_with_bboxes = ann.result()
```python
import cv2 as cv
import numpy as np
from ultralytics.utils.plotting import Annotator, colors
obb_names = {10: "small vehicle"}
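For reference, the `Annotator` pattern touched by the two hunks above reduces to a few calls; a minimal sketch assuming the bundled `bus.jpg` asset and hypothetical box coordinates:

```python
import cv2 as cv

from ultralytics.utils.plotting import Annotator, colors

image = cv.imread("ultralytics/assets/bus.jpg")
ann = Annotator(image, line_width=2)

# Draw one labeled box (hypothetical xyxy coordinates, class index 0)
ann.box_label([50, 100, 300, 500], label="person", color=colors(0, bgr=True))

annotated = ann.result()
cv.imwrite("annotated_bus.jpg", annotated)
```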

@ -1,6 +1,6 @@
# Ultralytics YOLO 🚀, AGPL-3.0 license
__version__ = "8.2.29"
__version__ = "8.2.30"
import os

@ -125,6 +125,7 @@ Here is a Python script using OpenCV (`cv2`) and YOLOv8 to run object tracking o
```python
import cv2
from ultralytics import YOLO
# Load the YOLOv8 model
@ -176,6 +177,7 @@ from collections import defaultdict
import cv2
import numpy as np
from ultralytics import YOLO
# Load the YOLOv8 model
@ -257,6 +259,7 @@ Finally, after all threads have completed their task, the windows displaying the
import threading
import cv2
from ultralytics import YOLO