ultralytics 8.0.195 NVIDIA Triton Inference Server support (#5257)
Co-authored-by: TheConstant3 <46416203+TheConstant3@users.noreply.github.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
parent 40e3923cfc
commit c7aa83da31
21 changed files with 349 additions and 98 deletions
@@ -81,6 +81,12 @@ class Model(nn.Module):
             self.session = HUBTrainingSession(model)
             model = self.session.model_file
 
+        # Check if Triton Server model
+        elif self.is_triton_model(model):
+            self.model = model
+            self.task = task
+            return
+
         # Load or create new YOLO model
         suffix = Path(model).suffix
         if not suffix and Path(model).stem in GITHUB_ASSETS_STEMS:
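The new elif branch above lets the Model constructor accept a Triton Server URL and return early instead of loading local weights. A minimal usage sketch, assuming a Triton server exposing an endpoint named yolov8n on localhost:8000 (the endpoint name, port, and sample image URL are illustrative, not part of this commit):

from ultralytics import YOLO

# Point YOLO at a Triton Inference Server endpoint instead of a local weights file.
# The task is passed explicitly because it cannot be inferred from a bare URL.
model = YOLO('http://localhost:8000/yolov8n', task='detect')
results = model('https://ultralytics.com/images/bus.jpg')  # inference is served by Triton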
@@ -94,6 +100,13 @@ class Model(nn.Module):
         """Calls the 'predict' function with given arguments to perform object detection."""
         return self.predict(source, stream, **kwargs)
 
+    @staticmethod
+    def is_triton_model(model):
+        """Is model a Triton Server URL string, i.e. <scheme>://<netloc>/<endpoint>/<task_name>"""
+        from urllib.parse import urlsplit
+        url = urlsplit(model)
+        return url.netloc and url.path and url.scheme in {'http', 'grpc'}
+
     @staticmethod
     def is_hub_model(model):
         """Check if the provided model is a HUB model."""
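A quick sketch of why the urlsplit() check in is_triton_model() separates a Triton URL from an ordinary weights path (the example strings are illustrative):

from urllib.parse import urlsplit

url = urlsplit('http://localhost:8000/yolov8n')
print(url.scheme, url.netloc, url.path)        # http localhost:8000 /yolov8n -> treated as a Triton model

local = urlsplit('yolov8n.pt')
print(repr(local.scheme), repr(local.netloc))  # '' '' -> falls through to normal checkpoint loading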