File tree: 1 file changed, +3 −3 lines changed.

@@ -92,7 +92,7 @@ def init_distributed_device(args):
     args.world_size = result['world_size']
     args.rank = result['global_rank']
     args.local_rank = result['local_rank']
-    args.distributed = args.world_size > 1
+    args.distributed = result['distributed']
     device = torch.device(args.device)
     return device

@@ -154,12 +154,12 @@ def init_distributed_device_so(
         assert torch.cuda.is_available(), f'CUDA is not available but {device} was specified.'

     if distributed and device != 'cpu':
-        device, device_idx = device.split(':', maxsplit=1)
+        device, *device_idx = device.split(':', maxsplit=1)

         # Ignore manually specified device index in distributed mode and
         # override with resolved local rank, fewer headaches in most setups.
         if device_idx:
-            _logger.warning(f'device index {device_idx} removed from specified ({device}).')
+            _logger.warning(f'device index {device_idx[0]} removed from specified ({device}).')

         device = f'{device}:{local_rank}'

You can’t perform that action at this time.
0 commit comments