diff --git a/docs/en/guides/streamlit-live-inference.md b/docs/en/guides/streamlit-live-inference.md
index 708e1b20..68fbe925 100644
--- a/docs/en/guides/streamlit-live-inference.md
+++ b/docs/en/guides/streamlit-live-inference.md
@@ -45,7 +45,7 @@ Streamlit makes it simple to build and deploy interactive web applications. Comb
         ```bash
         yolo solutions inference
 
-        yolo solutions inference model="path/to/model/file.pt"
+        yolo solutions inference model="path/to/model.pt"
         ```
 
     === "Python"
diff --git a/ultralytics/engine/model.py b/ultralytics/engine/model.py
index 8affd958..d5d4db26 100644
--- a/ultralytics/engine/model.py
+++ b/ultralytics/engine/model.py
@@ -2,7 +2,7 @@
 
 import inspect
 from pathlib import Path
-from typing import Dict, List, Union
+from typing import Any, Dict, List, Union
 
 import numpy as np
 import torch
@@ -152,7 +152,7 @@ class Model(nn.Module):
         self,
         source: Union[str, Path, int, Image.Image, list, tuple, np.ndarray, torch.Tensor] = None,
         stream: bool = False,
-        **kwargs,
+        **kwargs: Any,
     ) -> list:
         """
         Alias for the predict method, enabling the model instance to be callable for predictions.
@@ -165,7 +165,7 @@ class Model(nn.Module):
                 the image(s) to make predictions on. Can be a file path, URL, PIL image, numpy array, PyTorch
                 tensor, or a list/tuple of these.
             stream (bool): If True, treat the input source as a continuous stream for predictions.
-            **kwargs (Any): Additional keyword arguments to configure the prediction process.
+            **kwargs: Additional keyword arguments to configure the prediction process.
 
         Returns:
             (List[ultralytics.engine.results.Results]): A list of prediction results, each encapsulated in a
@@ -466,7 +466,7 @@ class Model(nn.Module):
         self,
         source: Union[str, Path, int, list, tuple, np.ndarray, torch.Tensor] = None,
         stream: bool = False,
-        **kwargs,
+        **kwargs: Any,
     ) -> list:
         """
         Generates image embeddings based on the provided source.
@@ -478,7 +478,7 @@ class Model(nn.Module):
             source (str | Path | int | List | Tuple | np.ndarray | torch.Tensor): The source of the image for
                 generating embeddings. Can be a file path, URL, PIL image, numpy array, etc.
             stream (bool): If True, predictions are streamed.
-            **kwargs (Any): Additional keyword arguments for configuring the embedding process.
+            **kwargs: Additional keyword arguments for configuring the embedding process.
 
         Returns:
             (List[torch.Tensor]): A list containing the image embeddings.
@@ -501,7 +501,7 @@ class Model(nn.Module):
         source: Union[str, Path, int, Image.Image, list, tuple, np.ndarray, torch.Tensor] = None,
         stream: bool = False,
         predictor=None,
-        **kwargs,
+        **kwargs: Any,
     ) -> List[Results]:
         """
         Performs predictions on the given image source using the YOLO model.
@@ -517,7 +517,7 @@ class Model(nn.Module):
             stream (bool): If True, treats the input source as a continuous stream for predictions.
             predictor (BasePredictor | None): An instance of a custom predictor class for making predictions.
                 If None, the method uses a default predictor.
-            **kwargs (Any): Additional keyword arguments for configuring the prediction process.
+            **kwargs: Additional keyword arguments for configuring the prediction process.
 
         Returns:
             (List[ultralytics.engine.results.Results]): A list of prediction results, each encapsulated in a
@@ -562,7 +562,7 @@ class Model(nn.Module):
         source: Union[str, Path, int, list, tuple, np.ndarray, torch.Tensor] = None,
         stream: bool = False,
         persist: bool = False,
-        **kwargs,
+        **kwargs: Any,
     ) -> List[Results]:
         """
         Conducts object tracking on the specified input source using the registered trackers.
@@ -576,7 +576,7 @@ class Model(nn.Module):
                 tracking. Can be a file path, URL, or video stream.
             stream (bool): If True, treats the input source as a continuous video stream. Defaults to False.
             persist (bool): If True, persists trackers between different calls to this method. Defaults to False.
-            **kwargs (Any): Additional keyword arguments for configuring the tracking process.
+            **kwargs: Additional keyword arguments for configuring the tracking process.
 
         Returns:
             (List[ultralytics.engine.results.Results]): A list of tracking results, each a Results object.
@@ -607,7 +607,7 @@ class Model(nn.Module):
     def val(
         self,
         validator=None,
-        **kwargs,
+        **kwargs: Any,
     ):
         """
         Validates the model using a specified dataset and validation configuration.
@@ -619,7 +619,7 @@ class Model(nn.Module):
         Args:
             validator (ultralytics.engine.validator.BaseValidator | None): An instance of a custom validator class
                 for validating the model.
-            **kwargs (Any): Arbitrary keyword arguments for customizing the validation process.
+            **kwargs: Arbitrary keyword arguments for customizing the validation process.
 
         Returns:
             (ultralytics.utils.metrics.DetMetrics): Validation metrics obtained from the validation process.
@@ -642,7 +642,7 @@ class Model(nn.Module):
 
     def benchmark(
         self,
-        **kwargs,
+        **kwargs: Any,
     ):
         """
         Benchmarks the model across various export formats to evaluate performance.
@@ -653,7 +653,7 @@ class Model(nn.Module):
         defaults, and any additional user-provided keyword arguments.
 
         Args:
-            **kwargs (Any): Arbitrary keyword arguments to customize the benchmarking process. These are combined with
+            **kwargs: Arbitrary keyword arguments to customize the benchmarking process. These are combined with
                 default configurations, model-specific arguments, and method defaults. Common options include:
                 - data (str): Path to the dataset for benchmarking.
                 - imgsz (int | List[int]): Image size for benchmarking.
@@ -691,7 +691,7 @@ class Model(nn.Module):
 
     def export(
         self,
-        **kwargs,
+        **kwargs: Any,
    ) -> str:
         """
         Exports the model to a different format suitable for deployment.
@@ -701,7 +701,7 @@ class Model(nn.Module):
         defaults, and any additional arguments provided.
 
         Args:
-            **kwargs (Dict): Arbitrary keyword arguments to customize the export process. These are combined with
+            **kwargs: Arbitrary keyword arguments to customize the export process. These are combined with
                 the model's overrides and method defaults. Common arguments include:
                 format (str): Export format (e.g., 'onnx', 'engine', 'coreml').
                 half (bool): Export model in half-precision.
@@ -740,7 +740,7 @@ class Model(nn.Module):
     def train(
         self,
         trainer=None,
-        **kwargs,
+        **kwargs: Any,
     ):
         """
         Trains the model using the specified dataset and training configuration.
@@ -755,7 +755,7 @@ class Model(nn.Module):
 
         Args:
             trainer (BaseTrainer | None): Custom trainer instance for model training. If None, uses default.
-            **kwargs (Any): Arbitrary keyword arguments for training configuration. Common options include:
+            **kwargs: Arbitrary keyword arguments for training configuration. Common options include:
                 data (str): Path to dataset configuration file.
                 epochs (int): Number of training epochs.
                 batch_size (int): Batch size for training.
@@ -816,8 +816,8 @@ class Model(nn.Module):
         self,
         use_ray=False,
         iterations=10,
-        *args,
-        **kwargs,
+        *args: Any,
+        **kwargs: Any,
     ):
         """
         Conducts hyperparameter tuning for the model, with an option to use Ray Tune.
@@ -830,8 +830,8 @@ class Model(nn.Module):
         Args:
             use_ray (bool): If True, uses Ray Tune for hyperparameter tuning. Defaults to False.
             iterations (int): The number of tuning iterations to perform. Defaults to 10.
-            *args (List): Variable length argument list for additional arguments.
-            **kwargs (Dict): Arbitrary keyword arguments. These are combined with the model's overrides and defaults.
+            *args: Variable length argument list for additional arguments.
+            **kwargs: Arbitrary keyword arguments. These are combined with the model's overrides and defaults.
 
         Returns:
             (Dict): A dictionary containing the results of the hyperparameter search.
diff --git a/ultralytics/solutions/streamlit_inference.py b/ultralytics/solutions/streamlit_inference.py
index cf09269c..926bfbc7 100644
--- a/ultralytics/solutions/streamlit_inference.py
+++ b/ultralytics/solutions/streamlit_inference.py
@@ -2,6 +2,7 @@
 
 import io
 import time
+from typing import Any
 
 import cv2
 
@@ -37,16 +38,16 @@ class Inference:
         inference: Performs real-time object detection inference.
 
     Examples:
-        >>> inf = solutions.Inference(model="path/to/model/file.pt")  # Model is not necessary argument.
+        >>> inf = solutions.Inference(model="path/to/model.pt")  # Model is not necessary argument.
         >>> inf.inference()
     """
 
-    def __init__(self, **kwargs):
+    def __init__(self, **kwargs: Any):
         """
         Initializes the Inference class, checking Streamlit requirements and setting up the model path.
 
         Args:
-            **kwargs (Dict): Additional keyword arguments for model configuration.
+            **kwargs (Any): Additional keyword arguments for model configuration.
         """
         check_requirements("streamlit>=1.29.0")  # scope imports for faster ultralytics package load speeds
         import streamlit as st
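
For context, a minimal usage sketch of the `solutions.Inference` entry point touched by this diff, mirroring the docstring example above. The script name `app.py` is an assumption, and the model path is the same placeholder used in the docs; extra keyword arguments passed to `Inference` are accepted for model configuration, matching the `**kwargs (Any)` docstring.

```python
# Minimal sketch (not part of the diff): launch the Streamlit live-inference
# solution from a plain Python script. Save as app.py (assumed name) and run:
#   streamlit run app.py
from ultralytics import solutions

if __name__ == "__main__":
    inf = solutions.Inference(model="path/to/model.pt")  # model argument is optional, as noted in the docstring
    inf.inference()  # opens the Streamlit UI for real-time inference
```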