AutoBatch larger batches on larger GPUs (#16785)
This commit is contained in:
parent 000095d600
commit 8154a27e9b
1 changed file with 1 addition and 1 deletion
@@ -67,7 +67,7 @@ def autobatch(model, imgsz=640, fraction=0.60, batch_size=DEFAULT_CFG.batch):
     LOGGER.info(f"{prefix}{d} ({properties.name}) {t:.2f}G total, {r:.2f}G reserved, {a:.2f}G allocated, {f:.2f}G free")

     # Profile batch sizes
-    batch_sizes = [1, 2, 4, 8, 16]
+    batch_sizes = [1, 2, 4, 8, 16] if t < 16 else [1, 2, 4, 8, 16, 32, 64]
     try:
         img = [torch.empty(b, 3, imgsz, imgsz) for b in batch_sizes]
         results = profile(img, model, n=1, device=device)
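The one-line change makes the list of candidate batch sizes depend on total GPU memory: `t` is the device's total memory in GiB (logged as "G total" above), so GPUs reporting 16 GiB or more are also profiled at batch sizes 32 and 64. Below is a minimal standalone sketch of that selection logic; `pick_candidate_batches` is a hypothetical helper name used only for illustration, and the GiB conversion is assumed to mirror what autobatch() does internally.

import torch

def pick_candidate_batches(device: int = 0) -> list:
    # Hypothetical helper illustrating the selection logic in the diff above.
    properties = torch.cuda.get_device_properties(device)
    t = properties.total_memory / (1 << 30)  # total GPU memory in GiB, like `t` in autobatch()
    # GPUs under 16 GiB stop profiling at batch 16; larger GPUs also try 32 and 64.
    return [1, 2, 4, 8, 16] if t < 16 else [1, 2, 4, 8, 16, 32, 64]

if torch.cuda.is_available():
    print(pick_candidate_batches(0))  # e.g. [1, 2, 4, 8, 16, 32, 64] on a 24 GiB card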