Add docformatter to pre-commit (#5279)
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Burhan <62214284+Burhan-Q@users.noreply.github.com>
This commit is contained in:
parent
c7aa83da31
commit
7517667a33
90 changed files with 1396 additions and 497 deletions
|
|
@ -1,7 +1,5 @@
|
|||
# Ultralytics YOLO 🚀, AGPL-3.0 license
|
||||
"""
|
||||
Model validation metrics
|
||||
"""
|
||||
"""Model validation metrics."""
|
||||
|
||||
import math
|
||||
import warnings
|
||||
|
|
@ -195,7 +193,7 @@ class ConfusionMatrix:
|
|||
|
||||
def process_cls_preds(self, preds, targets):
|
||||
"""
|
||||
Update confusion matrix for classification task
|
||||
Update confusion matrix for classification task.
|
||||
|
||||
Args:
|
||||
preds (Array[N, min(nc,5)]): Predicted class labels.
|
||||
|
|
@ -308,9 +306,7 @@ class ConfusionMatrix:
|
|||
on_plot(plot_fname)
|
||||
|
||||
def print(self):
    """Print the confusion matrix to the console."""
    # Emit one log line per row; the matrix has nc + 1 rows (extra
    # row/column for the background class).
    for i in range(self.nc + 1):
        row = self.matrix[i]
        LOGGER.info(' '.join(str(v) for v in row))
|
||||
|
||||
|
|
@ -440,7 +436,6 @@ def ap_per_class(tp,
|
|||
f1 (np.ndarray): F1-score values at each confidence threshold.
|
||||
ap (np.ndarray): Average precision for each class at different IoU thresholds.
|
||||
unique_classes (np.ndarray): An array of unique classes that have data.
|
||||
|
||||
"""
|
||||
|
||||
# Sort by objectness
|
||||
|
|
@ -498,32 +493,33 @@ def ap_per_class(tp,
|
|||
|
||||
class Metric(SimpleClass):
|
||||
"""
|
||||
Class for computing evaluation metrics for YOLOv8 model.
|
||||
Class for computing evaluation metrics for YOLOv8 model.
|
||||
|
||||
Attributes:
|
||||
p (list): Precision for each class. Shape: (nc,).
|
||||
r (list): Recall for each class. Shape: (nc,).
|
||||
f1 (list): F1 score for each class. Shape: (nc,).
|
||||
all_ap (list): AP scores for all classes and all IoU thresholds. Shape: (nc, 10).
|
||||
ap_class_index (list): Index of class for each AP score. Shape: (nc,).
|
||||
nc (int): Number of classes.
|
||||
Attributes:
|
||||
p (list): Precision for each class. Shape: (nc,).
|
||||
r (list): Recall for each class. Shape: (nc,).
|
||||
f1 (list): F1 score for each class. Shape: (nc,).
|
||||
all_ap (list): AP scores for all classes and all IoU thresholds. Shape: (nc, 10).
|
||||
ap_class_index (list): Index of class for each AP score. Shape: (nc,).
|
||||
nc (int): Number of classes.
|
||||
|
||||
Methods:
|
||||
ap50(): AP at IoU threshold of 0.5 for all classes. Returns: List of AP scores. Shape: (nc,) or [].
|
||||
ap(): AP at IoU thresholds from 0.5 to 0.95 for all classes. Returns: List of AP scores. Shape: (nc,) or [].
|
||||
mp(): Mean precision of all classes. Returns: Float.
|
||||
mr(): Mean recall of all classes. Returns: Float.
|
||||
map50(): Mean AP at IoU threshold of 0.5 for all classes. Returns: Float.
|
||||
map75(): Mean AP at IoU threshold of 0.75 for all classes. Returns: Float.
|
||||
map(): Mean AP at IoU thresholds from 0.5 to 0.95 for all classes. Returns: Float.
|
||||
mean_results(): Mean of results, returns mp, mr, map50, map.
|
||||
class_result(i): Class-aware result, returns p[i], r[i], ap50[i], ap[i].
|
||||
maps(): mAP of each class. Returns: Array of mAP scores, shape: (nc,).
|
||||
fitness(): Model fitness as a weighted combination of metrics. Returns: Float.
|
||||
update(results): Update metric attributes with new evaluation results.
|
||||
"""
|
||||
Methods:
|
||||
ap50(): AP at IoU threshold of 0.5 for all classes. Returns: List of AP scores. Shape: (nc,) or [].
|
||||
ap(): AP at IoU thresholds from 0.5 to 0.95 for all classes. Returns: List of AP scores. Shape: (nc,) or [].
|
||||
mp(): Mean precision of all classes. Returns: Float.
|
||||
mr(): Mean recall of all classes. Returns: Float.
|
||||
map50(): Mean AP at IoU threshold of 0.5 for all classes. Returns: Float.
|
||||
map75(): Mean AP at IoU threshold of 0.75 for all classes. Returns: Float.
|
||||
map(): Mean AP at IoU thresholds from 0.5 to 0.95 for all classes. Returns: Float.
|
||||
mean_results(): Mean of results, returns mp, mr, map50, map.
|
||||
class_result(i): Class-aware result, returns p[i], r[i], ap50[i], ap[i].
|
||||
maps(): mAP of each class. Returns: Array of mAP scores, shape: (nc,).
|
||||
fitness(): Model fitness as a weighted combination of metrics. Returns: Float.
|
||||
update(results): Update metric attributes with new evaluation results.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initializes a Metric instance for computing evaluation metrics for the YOLOv8 model."""
|
||||
self.p = [] # (nc, )
|
||||
self.r = [] # (nc, )
|
||||
self.f1 = [] # (nc, )
|
||||
|
|
@ -606,12 +602,12 @@ class Metric(SimpleClass):
|
|||
return [self.mp, self.mr, self.map50, self.map]
|
||||
|
||||
def class_result(self, i):
    """Class-aware result, return p[i], r[i], ap50[i], ap[i]."""
    # Pick the i-th entry of each per-class metric in a fixed order.
    return self.p[i], self.r[i], self.ap50[i], self.ap[i]
|
||||
|
||||
@property
|
||||
def maps(self):
|
||||
"""mAP of each class."""
|
||||
"""MAP of each class."""
|
||||
maps = np.zeros(self.nc) + self.map
|
||||
for i, c in enumerate(self.ap_class_index):
|
||||
maps[c] = self.ap[i]
|
||||
|
|
@ -672,6 +668,7 @@ class DetMetrics(SimpleClass):
|
|||
"""
|
||||
|
||||
def __init__(self, save_dir=Path('.'), plot=False, on_plot=None, names=()) -> None:
|
||||
"""Initialize a DetMetrics instance with a save directory, plot flag, callback function, and class names."""
|
||||
self.save_dir = save_dir
|
||||
self.plot = plot
|
||||
self.on_plot = on_plot
|
||||
|
|
@ -756,6 +753,7 @@ class SegmentMetrics(SimpleClass):
|
|||
"""
|
||||
|
||||
def __init__(self, save_dir=Path('.'), plot=False, on_plot=None, names=()) -> None:
|
||||
"""Initialize a SegmentMetrics instance with a save directory, plot flag, callback function, and class names."""
|
||||
self.save_dir = save_dir
|
||||
self.plot = plot
|
||||
self.on_plot = on_plot
|
||||
|
|
@ -865,6 +863,7 @@ class PoseMetrics(SegmentMetrics):
|
|||
"""
|
||||
|
||||
def __init__(self, save_dir=Path('.'), plot=False, on_plot=None, names=()) -> None:
|
||||
"""Initialize the PoseMetrics class with directory path, class names, and plotting options."""
|
||||
super().__init__(save_dir, plot, names)
|
||||
self.save_dir = save_dir
|
||||
self.plot = plot
|
||||
|
|
@ -954,6 +953,7 @@ class ClassifyMetrics(SimpleClass):
|
|||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize a ClassifyMetrics instance."""
|
||||
self.top1 = 0
|
||||
self.top5 = 0
|
||||
self.speed = {'preprocess': 0.0, 'inference': 0.0, 'loss': 0.0, 'postprocess': 0.0}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue