ultralytics 8.3.29 Sony IMX500 export (#14878)

Signed-off-by: UltralyticsAssistant <web@ultralytics.com>
Co-authored-by: UltralyticsAssistant <web@ultralytics.com>
Co-authored-by: Ultralytics Assistant <135830346+UltralyticsAssistant@users.noreply.github.com>
Co-authored-by: Francesco Mattioli <Francesco.mttl@gmail.com>
Co-authored-by: Lakshantha Dissanayake <lakshantha@ultralytics.com>
Co-authored-by: Lakshantha Dissanayake <lakshanthad@yahoo.com>
Co-authored-by: Chizkiyahu Raful <37312901+Chizkiyahu@users.noreply.github.com>
Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
Co-authored-by: Muhammad Rizwan Munawar <muhammadrizwanmunawar123@gmail.com>
Co-authored-by: Mohammed Yasin <32206511+Y-T-G@users.noreply.github.com>
Laughing 2024-11-11 21:20:21 +08:00 committed by GitHub
parent 2c6cd68144
commit 0fa1d7d5a6
16 changed files with 281 additions and 17 deletions

ultralytics/nn/autobackend.py

@@ -123,6 +123,7 @@ class AutoBackend(nn.Module):
             paddle,
             mnn,
             ncnn,
+            imx,
             triton,
         ) = self._model_type(w)
         fp16 &= pt or jit or onnx or xml or engine or nn_module or triton  # FP16
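
The hunk above adds an imx flag to the booleans unpacked from AutoBackend._model_type, so a Sony IMX500 export directory is recognized as its own backend. A minimal sketch of checking that flag, assuming ultralytics>=8.3.29 is installed and using a hypothetical yolov8n_imx_model export directory; only the trailing flags are unpacked, relying on the ordering shown in the tuple above:

    from ultralytics.nn.autobackend import AutoBackend

    # _model_type returns one boolean per export format; per the unpacking above,
    # the new imx flag sits immediately before triton at the end of the list.
    *_, imx, triton = AutoBackend._model_type("yolov8n_imx_model")  # hypothetical path
    print(imx)  # expected: True for an IMX500 export directory
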
@@ -182,8 +183,8 @@
             check_requirements("opencv-python>=4.5.4")
             net = cv2.dnn.readNetFromONNX(w)
 
-        # ONNX Runtime
-        elif onnx:
+        # ONNX Runtime and IMX
+        elif onnx or imx:
             LOGGER.info(f"Loading {w} for ONNX Runtime inference...")
             check_requirements(("onnx", "onnxruntime-gpu" if cuda else "onnxruntime"))
             if IS_RASPBERRYPI or IS_JETSON:
@@ -199,7 +200,22 @@
                 device = torch.device("cpu")
                 cuda = False
             LOGGER.info(f"Preferring ONNX Runtime {providers[0]}")
-            session = onnxruntime.InferenceSession(w, providers=providers)
+            if onnx:
+                session = onnxruntime.InferenceSession(w, providers=providers)
+            else:
+                check_requirements(
+                    ["model-compression-toolkit==2.1.1", "sony-custom-layers[torch]==0.2.0", "onnxruntime-extensions"]
+                )
+                w = next(Path(w).glob("*.onnx"))
+                LOGGER.info(f"Loading {w} for ONNX IMX inference...")
+                import mct_quantizers as mctq
+                from sony_custom_layers.pytorch.object_detection import nms_ort  # noqa
+
+                session = onnxruntime.InferenceSession(
+                    w, mctq.get_ort_session_options(), providers=["CPUExecutionProvider"]
+                )
+                task = "detect"
+
             output_names = [x.name for x in session.get_outputs()]
             metadata = session.get_modelmeta().custom_metadata_map
             dynamic = isinstance(session.get_outputs()[0].shape[0], str)
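
The else branch above handles the IMX case: it locates the .onnx file inside the exported directory, imports nms_ort only for its side effect of registering the Sony custom NMS operator with ONNX Runtime (the # noqa marks it as intentionally unused), and builds the session with the mct_quantizers session options on the CPU provider. A standalone sketch of that loading path, assuming the pinned packages from check_requirements are installed and a hypothetical yolov8n_imx_model export directory exists:

    from pathlib import Path

    import mct_quantizers as mctq
    import onnxruntime
    from sony_custom_layers.pytorch.object_detection import nms_ort  # noqa: imported for its op-registration side effect

    w = next(Path("yolov8n_imx_model").glob("*.onnx"))  # the export directory holds the quantized .onnx file
    session = onnxruntime.InferenceSession(
        w, mctq.get_ort_session_options(), providers=["CPUExecutionProvider"]
    )
    print([o.name for o in session.get_outputs()])  # output tensor names reported by the model
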
@@ -520,7 +536,7 @@
             y = self.net.forward()
 
         # ONNX Runtime
-        elif self.onnx:
+        elif self.onnx or self.imx:
             if self.dynamic:
                 im = im.cpu().numpy()  # torch to numpy
                 y = self.session.run(self.output_names, {self.session.get_inputs()[0].name: im})
@@ -537,6 +553,9 @@
                 )
                 self.session.run_with_iobinding(self.io)
                 y = self.bindings
+            if self.imx:
+                # boxes, conf, cls
+                y = np.concatenate([y[0], y[1][:, :, None], y[2][:, :, None]], axis=-1)
 
         # OpenVINO
         elif self.xml:
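
With self.imx set, the session returns separate boxes, confidence and class tensors, and the concatenation above packs them into a single (batch, num_detections, 6) array for downstream post-processing. A small numpy sketch of that packing, assuming shapes of (1, N, 4) for boxes and (1, N) for the confidence and class tensors:

    import numpy as np

    n = 5  # example detection count
    boxes, conf, cls = np.zeros((1, n, 4)), np.zeros((1, n)), np.zeros((1, n))
    y = np.concatenate([boxes, conf[:, :, None], cls[:, :, None]], axis=-1)
    print(y.shape)  # (1, 5, 6): 4 box coordinates + confidence + class per detection

For context, the export side added elsewhere in this commit (not shown here) produces such a directory; with ultralytics>=8.3.29 it can be created with YOLO("yolov8n.pt").export(format="imx") and loaded back through the AutoBackend branches above.
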