ultralytics 8.0.171 new SAHI guide and callbacks fix (#4748)

Co-authored-by: chuzihang <49548797+Aria-Leo@users.noreply.github.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Sanghyun Choi <farewell518@gmail.com>
Co-authored-by: Awsome <1579093407@qq.com>
Co-authored-by: ConanQZ <49194386+ConanQZ@users.noreply.github.com>
Glenn Jocher 2023-09-05 16:41:14 +02:00 committed by GitHub
parent aa9133bb88
commit 577d066fb2
15 changed files with 215 additions and 35 deletions

ultralytics/utils/plotting.py

@@ -12,11 +12,10 @@ import torch
 from PIL import Image, ImageDraw, ImageFont
 from PIL import __version__ as pil_version
-from ultralytics.utils import LOGGER, TryExcept, plt_settings, threaded
+from ultralytics.utils import LOGGER, TryExcept, ops, plt_settings, threaded
 from .checks import check_font, check_version, is_ascii
 from .files import increment_path
-from .ops import clip_boxes, scale_image, xywh2xyxy, xyxy2xywh
 class Colors:
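
The hunk above replaces the per-function imports from .ops with a single ops module import, so the plotting helpers now call ops.xywh2xyxy, ops.scale_image, and so on. A minimal sketch of the new call pattern, using an invented box tensor:

import torch

from ultralytics.utils import ops

xywh = torch.tensor([[50.0, 50.0, 20.0, 10.0]])  # hypothetical box: center-x, center-y, width, height
xyxy = ops.xywh2xyxy(xywh)                       # corner format -> tensor([[40., 45., 60., 55.]])
print(xyxy, ops.xyxy2xywh(xyxy))                 # the two converters round-trip
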
@@ -163,7 +162,7 @@ class Annotator:
         im_gpu = im_gpu * inv_alph_masks[-1] + mcs
         im_mask = (im_gpu * 255)
         im_mask_np = im_mask.byte().cpu().numpy()
-        self.im[:] = im_mask_np if retina_masks else scale_image(im_mask_np, self.im.shape)
+        self.im[:] = im_mask_np if retina_masks else ops.scale_image(im_mask_np, self.im.shape)
         if self.pil:
             # Convert im back to PIL and update draw
             self.fromarray(self.im)
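
When retina_masks is False, the composited mask canvas is at the network's input resolution, and ops.scale_image brings it back to the annotated image's shape before it is written into self.im. A rough, simplified stand-in for the idea (a plain resize only; the real helper also accounts for letterbox padding, and the function name below is hypothetical):

import cv2
import numpy as np

def naive_scale_image(mask_canvas: np.ndarray, target_shape) -> np.ndarray:
    # Resize an HxWx3 uint8 canvas to the target (H, W, ...) shape; illustrative only,
    # not the ultralytics implementation.
    h, w = target_shape[:2]
    return cv2.resize(mask_canvas, (w, h), interpolation=cv2.INTER_LINEAR)

canvas = np.zeros((640, 640, 3), dtype=np.uint8)       # canvas at a made-up inference size
print(naive_scale_image(canvas, (480, 720, 3)).shape)  # (480, 720, 3)
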
@@ -268,8 +267,9 @@ def plot_labels(boxes, cls, names=(), save_dir=Path(''), on_plot=None):
     import pandas as pd
     import seaborn as sn
-    # Filter matplotlib>=3.7.2 warning
+    # Filter matplotlib>=3.7.2 warning and Seaborn use_inf and is_categorical FutureWarnings
     warnings.filterwarnings('ignore', category=UserWarning, message='The figure layout has changed to tight')
+    warnings.filterwarnings('ignore', category=FutureWarning)
     # Plot dataset labels
     LOGGER.info(f"Plotting labels to {save_dir / 'labels.jpg'}... ")
@@ -285,8 +285,8 @@ def plot_labels(boxes, cls, names=(), save_dir=Path(''), on_plot=None):
     # Matplotlib labels
     ax = plt.subplots(2, 2, figsize=(8, 8), tight_layout=True)[1].ravel()
     y = ax[0].hist(cls, bins=np.linspace(0, nc, nc + 1) - 0.5, rwidth=0.8)
-    with contextlib.suppress(Exception):  # color histogram bars by class
-        [y[2].patches[i].set_color([x / 255 for x in colors(i)]) for i in range(nc)]  # known issue #3195
+    for i in range(nc):
+        y[2].patches[i].set_color([x / 255 for x in colors(i)])
     ax[0].set_ylabel('instances')
     if 0 < len(names) < 30:
         ax[0].set_xticks(range(len(names)))
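
The new loop colors each histogram bar by its class index directly instead of hiding the work inside contextlib.suppress(Exception), so failures surface instead of being swallowed. A self-contained sketch of the bar-coloring step with an invented three-class palette:

import matplotlib
matplotlib.use('Agg')  # headless backend for the sketch
import matplotlib.pyplot as plt
import numpy as np

nc = 3                                                    # hypothetical class count
cls = np.random.randint(0, nc, 100)                       # fake class labels
palette = [(255, 56, 56), (61, 219, 134), (26, 147, 52)]  # made-up RGB colors

ax = plt.subplots(2, 2, figsize=(8, 8), tight_layout=True)[1].ravel()
y = ax[0].hist(cls, bins=np.linspace(0, nc, nc + 1) - 0.5, rwidth=0.8)
for i in range(nc):
    # hist() returns (counts, bin_edges, patches); on recent matplotlib, patches is a
    # BarContainer whose .patches list holds one Rectangle per bar.
    y[2].patches[i].set_color([x / 255 for x in palette[i]])
ax[0].set_ylabel('instances')
plt.savefig('labels_sketch.jpg')
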
@@ -298,7 +298,7 @@ def plot_labels(boxes, cls, names=(), save_dir=Path(''), on_plot=None):
     # Rectangles
     boxes[:, 0:2] = 0.5  # center
-    boxes = xywh2xyxy(boxes) * 1000
+    boxes = ops.xywh2xyxy(boxes) * 1000
     img = Image.fromarray(np.ones((1000, 1000, 3), dtype=np.uint8) * 255)
     for cls, box in zip(cls[:500], boxes[:500]):
         ImageDraw.Draw(img).rectangle(box, width=1, outline=colors(cls))  # plot
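
This block pins every normalized xywh label box to the canvas center, converts to corners, scales to a 1000 px square, and outlines it with PIL. A condensed standalone version of the same recipe, with invented boxes and an invented color:

import numpy as np
from PIL import Image, ImageDraw

from ultralytics.utils import ops

boxes = np.array([[0.5, 0.5, 0.2, 0.1],
                  [0.5, 0.5, 0.4, 0.4]], dtype=np.float32)  # made-up normalized xywh boxes
boxes[:, 0:2] = 0.5                                         # center every box
xyxy = ops.xywh2xyxy(boxes) * 1000                          # corners on a 1000 x 1000 canvas

img = Image.fromarray(np.ones((1000, 1000, 3), dtype=np.uint8) * 255)
draw = ImageDraw.Draw(img)
for box in xyxy:
    draw.rectangle(box.tolist(), width=1, outline=(255, 56, 56))
img.save('rectangles_sketch.jpg')
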
@@ -348,12 +348,12 @@ def save_one_box(xyxy, im, file=Path('im.jpg'), gain=1.02, pad=10, square=False,
     if not isinstance(xyxy, torch.Tensor):  # may be list
         xyxy = torch.stack(xyxy)
-    b = xyxy2xywh(xyxy.view(-1, 4))  # boxes
+    b = ops.xyxy2xywh(xyxy.view(-1, 4))  # boxes
     if square:
         b[:, 2:] = b[:, 2:].max(1)[0].unsqueeze(1)  # attempt rectangle to square
     b[:, 2:] = b[:, 2:] * gain + pad  # box wh * gain + pad
-    xyxy = xywh2xyxy(b).long()
-    clip_boxes(xyxy, im.shape)
+    xyxy = ops.xywh2xyxy(b).long()
+    ops.clip_boxes(xyxy, im.shape)
     crop = im[int(xyxy[0, 1]):int(xyxy[0, 3]), int(xyxy[0, 0]):int(xyxy[0, 2]), ::(1 if BGR else -1)]
     if save:
         file.parent.mkdir(parents=True, exist_ok=True)  # make directory
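
save_one_box inflates the requested crop before slicing: convert to center format, optionally square it, multiply width/height by gain and add pad pixels, convert back to corners, then clip to the image bounds. Worked numbers for a hypothetical 100 x 60 box with the default gain=1.02 and pad=10:

import torch

from ultralytics.utils import ops

xyxy = torch.tensor([[200.0, 120.0, 300.0, 180.0]])  # invented 100 x 60 box
b = ops.xyxy2xywh(xyxy)                              # center format: (250, 150, 100, 60)
b[:, 2:] = b[:, 2:] * 1.02 + 10                      # inflate: w = 112.0, h = 71.2
grown = ops.xywh2xyxy(b).long()                      # back to corners: (194, 114, 306, 185)
ops.clip_boxes(grown, (480, 640, 3))                 # clamp in place to a made-up 480 x 640 image
print(grown)
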
@@ -425,7 +425,7 @@ def plot_images(images,
             classes = cls[idx].astype('int')
             if len(bboxes):
-                boxes = xywh2xyxy(bboxes[idx, :4]).T
+                boxes = ops.xywh2xyxy(bboxes[idx, :4]).T
                 labels = bboxes.shape[1] == 4  # labels if no conf column
                 conf = None if labels else bboxes[idx, 4]  # check for confidence presence (label vs pred)
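
plot_images decides whether a batch carries ground-truth labels or predictions by column count: four box columns mean labels, a fifth column holds the confidence. A tiny sketch of that check with fabricated arrays:

import numpy as np

labels_array = np.array([[0.5, 0.5, 0.2, 0.2]])        # xywh only -> ground truth
preds_array = np.array([[0.5, 0.5, 0.2, 0.2, 0.91]])   # xywh + conf -> predictions

for bboxes in (labels_array, preds_array):
    is_labels = bboxes.shape[1] == 4                   # no confidence column present
    conf = None if is_labels else bboxes[:, 4]
    print('labels' if is_labels else 'predictions', conf)
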
@@ -554,7 +554,7 @@ def output_to_target(output, max_det=300):
     for i, o in enumerate(output):
         box, conf, cls = o[:max_det, :6].cpu().split((4, 1, 1), 1)
         j = torch.full((conf.shape[0], 1), i)
-        targets.append(torch.cat((j, cls, xyxy2xywh(box), conf), 1))
+        targets.append(torch.cat((j, cls, ops.xyxy2xywh(box), conf), 1))
     targets = torch.cat(targets, 0).numpy()
     return targets[:, 0], targets[:, 1], targets[:, 2:]
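
output_to_target flattens per-image detections (xyxy, conf, cls) into plotting targets of the form (image index, class, xywh, conf). A runnable sketch of that conversion on a fabricated two-image batch:

import torch

from ultralytics.utils import ops

# Invented detections; columns are x1, y1, x2, y2, conf, cls.
output = [torch.tensor([[10.0, 10.0, 50.0, 30.0, 0.9, 0.0]]),
          torch.tensor([[20.0, 40.0, 60.0, 80.0, 0.8, 2.0]])]

targets = []
for i, o in enumerate(output):
    box, conf, cls = o[:, :6].cpu().split((4, 1, 1), 1)
    j = torch.full((conf.shape[0], 1), i)                           # image-index column
    targets.append(torch.cat((j, cls, ops.xyxy2xywh(box), conf), 1))
targets = torch.cat(targets, 0).numpy()
print(targets[:, 0], targets[:, 1], targets[:, 2:])                 # image idx, class, xywh + conf
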