ultralytics 8.0.53 DDP AMP and Edge TPU fixes (#1362)

Co-authored-by: Richard Aljaste <richardaljasteabramson@gmail.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Vuong Kha Sieu <75152429+hotfur@users.noreply.github.com>
Glenn Jocher 2023-03-12 02:08:13 +01:00 committed by GitHub
parent 177a68b39f
commit f921e1ac21
46 changed files with 1045 additions and 384 deletions

ultralytics/yolo/utils/torch_utils.py

@@ -33,7 +33,7 @@ TORCH_1_12 = check_version(torch.__version__, '1.12.0')
 def torch_distributed_zero_first(local_rank: int):
     # Decorator to make all processes in distributed training wait for each local_master to do something
     initialized = torch.distributed.is_available() and torch.distributed.is_initialized()
-    if initialized and local_rank not in {-1, 0}:
+    if initialized and local_rank not in (-1, 0):
         dist.barrier(device_ids=[local_rank])
     yield
     if initialized and local_rank == 0:
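
For context, a minimal usage sketch of the torch_distributed_zero_first context manager touched by this diff. The import path assumes the repo layout at this version, and prepare_dataset is a hypothetical helper, not part of the library: non-master ranks block at the entry barrier, rank 0 runs the body first, and the exit barrier then releases the other ranks so they reuse whatever rank 0 cached.

import os

from ultralytics.yolo.utils.torch_utils import torch_distributed_zero_first

LOCAL_RANK = int(os.getenv('LOCAL_RANK', -1))  # set by torchrun; -1 means non-DDP


def prepare_dataset():
    # Hypothetical helper: download and cache a dataset to a shared path
    pass


# Rank 0 (or -1 in non-DDP runs) executes the body before the other ranks,
# which wait at dist.barrier() until rank 0 releases them on exit
with torch_distributed_zero_first(LOCAL_RANK):
    prepare_dataset()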