Fix DDP when device is a list (#4600)
Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
This commit is contained in:
parent 23b4f697c9
commit 53b4f8c713
2 changed files with 9 additions and 2 deletions
@@ -164,7 +164,7 @@ class BaseTrainer:
         """Allow device='', device=None on Multi-GPU systems to default to device=0."""
         if isinstance(self.args.device, str) and len(self.args.device):  # i.e. device='0' or device='0,1,2,3'
             world_size = len(self.args.device.split(','))
-        elif isinstance(self.args.device, tuple):  # multi devices from cli is tuple type
+        elif isinstance(self.args.device, (tuple, list)):  # i.e. device=[0, 1, 2, 3] (multi-GPU from CLI is list)
             world_size = len(self.args.device)
         elif torch.cuda.is_available():  # i.e. device=None or device='' or device=number
             world_size = 1  # default to device 0
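For context, the sketch below walks through the world-size logic this hunk touches: a device spec may arrive as a string, a tuple, or (from the CLI) a list, and before this patch the list case fell through to the single-GPU default. The function name resolve_world_size and the final CPU fallback of 0 are illustrative assumptions, not part of the commit.

# Minimal sketch (not from the commit) of how the patched branch counts devices.
import torch

def resolve_world_size(device):
    """Return the number of processes to launch for the given device spec (illustrative)."""
    if isinstance(device, str) and len(device):  # e.g. '0' or '0,1,2,3'
        return len(device.split(','))
    if isinstance(device, (tuple, list)):  # e.g. [0, 1, 2, 3] passed from the CLI
        return len(device)
    if torch.cuda.is_available():  # device=None, device='' or a bare number
        return 1
    return 0  # assumed CPU fallback, not shown in the hunk

assert resolve_world_size('0,1,2,3') == 4
assert resolve_world_size([0, 1, 2, 3]) == 4  # before the patch this case yielded 1

With the tuple-only isinstance check, a list such as [0, 1, 2, 3] skipped the multi-GPU branch and DDP launched a single process; widening the check to (tuple, list) makes both CLI forms count devices the same way.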