Raspberry Pi 5 self-hosted CI (#8828)

Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
Co-authored-by: UltralyticsAssistant <web@ultralytics.com>
Co-authored-by: Ultralytics AI Assistant <135830346+UltralyticsAssistant@users.noreply.github.com>
Lakshantha Dissanayake 2024-04-16 20:36:21 -07:00 committed by GitHub
parent c54b013188
commit b76400a06b
5 changed files with 45 additions and 11 deletions


@@ -33,6 +33,10 @@ on:
         description: "Run GPU"
         default: false
         type: boolean
+      raspberrypi:
+        description: "Run Raspberry Pi"
+        default: false
+        type: boolean
       conda:
         description: "Run Conda"
         default: false
@@ -221,6 +225,28 @@ jobs:
         env:
           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
+  RaspberryPi:
+    if: github.repository == 'ultralytics/ultralytics' && (github.event_name == 'schedule' || github.event.inputs.raspberrypi == 'true')
+    timeout-minutes: 60
+    runs-on: raspberry-pi
+    steps:
+      - name: Activate Virtual Environment
+        run: |
+          python3.11 -m venv env
+          source env/bin/activate
+          echo PATH=$PATH >> $GITHUB_ENV
+      - name: Install requirements
+        run: |
+          python -m pip install --upgrade pip wheel
+          pip install -e . pytest mlflow pycocotools "ray[tune]<=2.9.3" --extra-index-url https://download.pytorch.org/whl/cpu
+      - name: Check environment
+        run: |
+          yolo checks
+          pip list
+      - name: Pytest tests
+        run: |
+          pytest --slow tests/
   Conda:
     if: github.repository == 'ultralytics/ultralytics' && (github.event_name == 'schedule' || github.event.inputs.conda == 'true')
     runs-on: ${{ matrix.os }}
@@ -281,11 +307,11 @@ jobs:
   Summary:
     runs-on: ubuntu-latest
-    needs: [HUB, Benchmarks, Tests, GPU, Conda] # Add job names that you want to check for failure
+    needs: [HUB, Benchmarks, Tests, GPU, RaspberryPi, Conda] # Add job names that you want to check for failure
     if: always() # This ensures the job runs even if previous jobs fail
     steps:
       - name: Check for failure and notify
-        if: (needs.HUB.result == 'failure' || needs.Benchmarks.result == 'failure' || needs.Tests.result == 'failure' || needs.GPU.result == 'failure' || needs.Conda.result == 'failure') && github.repository == 'ultralytics/ultralytics' && (github.event_name == 'schedule' || github.event_name == 'push')
+        if: (needs.HUB.result == 'failure' || needs.Benchmarks.result == 'failure' || needs.Tests.result == 'failure' || needs.GPU.result == 'failure' || needs.RaspberryPi.result == 'failure' || needs.Conda.result == 'failure' ) && github.repository == 'ultralytics/ultralytics' && (github.event_name == 'schedule' || github.event_name == 'push')
         uses: slackapi/slack-github-action@v1.25.0
         with:
           payload: |
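The RaspberryPi job above provisions a Python 3.11 virtual environment on the self-hosted `raspberry-pi` runner, persists its PATH through `$GITHUB_ENV` so later steps reuse the same interpreter, installs the package with CPU-only PyTorch wheels, and finally runs `pytest --slow tests/`. The `--slow` switch is not a built-in pytest option; it is normally registered in a conftest.py hook. A minimal sketch of that common pattern follows, which may differ from the repository's actual conftest.py:

# Hypothetical conftest.py sketch showing how a custom --slow flag is usually wired up in pytest.
# The repository's real conftest.py may implement this differently.
import pytest


def pytest_addoption(parser):
    """Register the --slow command-line option with pytest."""
    parser.addoption("--slow", action="store_true", default=False, help="run slow tests")


def pytest_collection_modifyitems(config, items):
    """Skip tests marked with @pytest.mark.slow unless --slow was given."""
    if config.getoption("--slow"):
        return  # run everything, including slow tests
    skip_slow = pytest.mark.skip(reason="need --slow option to run")
    for item in items:
        if "slow" in item.keywords:
            item.add_marker(skip_slow)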


@@ -101,6 +101,7 @@ export = [
     "openvino>=2024.0.0", # OpenVINO export
     "tensorflow<=2.13.1; python_version <= '3.11'", # TF bug https://github.com/ultralytics/ultralytics/issues/5161
     "tensorflowjs>=3.9.0; python_version <= '3.11'", # TF.js export, automatically installs tensorflow
+    "flatbuffers>=23.5.26,<100", # update old 'flatbuffers' included inside tensorflow package
     "numpy==1.23.5; platform_machine == 'aarch64'", # fix error: `np.bool` was a deprecated alias for the builtin `bool` when using TensorRT models on NVIDIA Jetson
     "h5py!=3.11.0; platform_machine == 'aarch64'", # fix h5py build issues due to missing aarch64 wheels in 3.11 release
 ]
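The `platform_machine == 'aarch64'` qualifiers above are PEP 508 environment markers, so those pins only take effect on 64-bit Arm hosts such as Raspberry Pi 5 or NVIDIA Jetson. A quick way to see how pip would evaluate such a marker, sketched with the third-party `packaging` library:

# Evaluate a PEP 508 environment marker, roughly as pip does (sketch using the `packaging` library).
from packaging.markers import Marker

marker = Marker("platform_machine == 'aarch64'")
print(marker.evaluate())  # True on a 64-bit Arm OS (e.g. Raspberry Pi OS 64-bit), False elsewhere
print(marker.evaluate({"platform_machine": "aarch64"}))  # evaluate against an overridden environment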


@@ -27,6 +27,7 @@ from ultralytics.utils import (
     Retry,
     checks,
     is_dir_writeable,
+    IS_RASPBERRYPI,
 )
 from ultralytics.utils.downloads import download
 from ultralytics.utils.torch_utils import TORCH_1_9, TORCH_1_13
@@ -221,10 +222,11 @@ def test_export_openvino():
     YOLO(f)(SOURCE)  # exported model inference
 
 
 @pytest.mark.skipif(WINDOWS, reason="CoreML not supported on Windows")  # RuntimeError: BlobWriter not loaded
+@pytest.mark.skipif(IS_RASPBERRYPI, reason="CoreML not supported on Raspberry Pi")
 @pytest.mark.skipif(checks.IS_PYTHON_3_12, reason="CoreML not supported in Python 3.12")
 def test_export_coreml():
     """Test exporting the YOLO model to CoreML format."""
-    if not WINDOWS:  # RuntimeError: BlobWriter not loaded with coremltools 7.0 on windows
+    if MACOS:
         f = YOLO(MODEL).export(format="coreml")
         YOLO(f)(SOURCE)  # model prediction only supported on macOS for nms=False models
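The new `IS_RASPBERRYPI` flag imported from `ultralytics.utils` skips CoreML export tests on the Pi, where coremltools is unsupported. One common way such a flag is derived is from the device-tree model string; the following is a hypothetical sketch only, and the actual ultralytics implementation may differ:

# Hypothetical Raspberry Pi detection via the firmware-provided device-tree model string.
# ultralytics.utils defines its own IS_RASPBERRYPI constant; this only illustrates the general idea.
from pathlib import Path


def is_raspberry_pi() -> bool:
    """Return True if the device-tree model string identifies a Raspberry Pi board."""
    try:
        return "Raspberry Pi" in Path("/proc/device-tree/model").read_text(errors="ignore")
    except OSError:  # file is absent on non-Pi hardware
        return False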


@@ -14,7 +14,7 @@ import torch
 import torch.nn as nn
 from PIL import Image
 
-from ultralytics.utils import ARM64, LINUX, LOGGER, ROOT, yaml_load
+from ultralytics.utils import ARM64, IS_JETSON, IS_RASPBERRYPI, LINUX, LOGGER, ROOT, yaml_load
 from ultralytics.utils.checks import check_requirements, check_suffix, check_version, check_yaml
 from ultralytics.utils.downloads import attempt_download_asset, is_url
@@ -183,6 +183,9 @@ class AutoBackend(nn.Module):
         elif onnx:
             LOGGER.info(f"Loading {w} for ONNX Runtime inference...")
             check_requirements(("onnx", "onnxruntime-gpu" if cuda else "onnxruntime"))
+            if IS_RASPBERRYPI or IS_JETSON:
+                # Fix error: module 'numpy.linalg._umath_linalg' has no attribute '_ilp64' when exporting to Tensorflow SavedModel on RPi and Jetson
+                check_requirements("numpy==1.23.5")
             import onnxruntime
 
             providers = ["CUDAExecutionProvider", "CPUExecutionProvider"] if cuda else ["CPUExecutionProvider"]
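At runtime, `check_requirements("numpy==1.23.5")` re-pins numpy on Raspberry Pi and Jetson before onnxruntime is imported, working around the `numpy.linalg._umath_linalg` attribute error noted in the comment. Conceptually the helper compares the installed version against the requirement and reinstalls on mismatch; below is a rough, hypothetical illustration of just the check (the real `ultralytics.utils.checks.check_requirements` does more, including the pip install):

# Rough, hypothetical illustration of the version check behind check_requirements("numpy==1.23.5").
# The real ultralytics helper can also auto-install the pinned version; this sketch only reports a mismatch.
from importlib.metadata import PackageNotFoundError, version

from packaging.requirements import Requirement


def meets_requirement(spec: str = "numpy==1.23.5") -> bool:
    """Return True if the installed distribution satisfies the given requirement string."""
    req = Requirement(spec)
    try:
        return version(req.name) in req.specifier  # e.g. "1.23.5" in SpecifierSet("==1.23.5")
    except PackageNotFoundError:  # package not installed at all
        return False


if not meets_requirement():
    print("numpy does not match the pin; check_requirements would trigger a reinstall here")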


@@ -35,7 +35,7 @@ import torch.cuda
 from ultralytics import YOLO, YOLOWorld
 from ultralytics.cfg import TASK2DATA, TASK2METRIC
 from ultralytics.engine.exporter import export_formats
-from ultralytics.utils import ASSETS, LINUX, LOGGER, MACOS, TQDM, WEIGHTS_DIR
+from ultralytics.utils import ARM64, ASSETS, IS_JETSON, IS_RASPBERRYPI, LINUX, LOGGER, MACOS, TQDM, WEIGHTS_DIR
 from ultralytics.utils.checks import IS_PYTHON_3_12, check_requirements, check_yolo
 from ultralytics.utils.files import file_size
 from ultralytics.utils.torch_utils import select_device
@@ -83,8 +83,10 @@ def benchmark(
         emoji, filename = "", None  # export defaults
         try:
             # Checks
+            if i == 5:  # CoreML
+                assert not (IS_RASPBERRYPI or IS_JETSON), "CoreML export not supported on Raspberry Pi or NVIDIA Jetson"
             if i == 9:  # Edge TPU
-                assert LINUX, "Edge TPU export only supported on Linux"
+                assert LINUX and not ARM64, "Edge TPU export only supported on non-aarch64 Linux"
             elif i == 7:  # TF GraphDef
                 assert model.task != "obb", "TensorFlow GraphDef not supported for OBB task"
             elif i in {5, 10}:  # CoreML and TF.js
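The tightened Edge TPU guard (`LINUX and not ARM64`) and the new CoreML guard rely on platform constants exported by `ultralytics.utils`. For readers reproducing this gating outside the library, a hypothetical sketch of how such flags can be derived (the actual ultralytics definitions may differ):

# Hypothetical platform flags mirroring the guards in the benchmark loop above.
# ultralytics.utils ships its own ARM64, IS_JETSON and IS_RASPBERRYPI constants; these are illustrative only.
import platform

ARM64_SKETCH = platform.machine().lower() in {"aarch64", "arm64"}  # 64-bit Arm (Pi 5, Jetson, Apple Silicon)
LINUX_SKETCH = platform.system() == "Linux"

if not (LINUX_SKETCH and not ARM64_SKETCH):
    print("Edge TPU export would be skipped on this host")  # matches the `assert LINUX and not ARM64` guard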