We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 7f5154a · commit d497f8d (Copy full SHA for d497f8d)
src/transformers/training_args.py
@@ -1649,8 +1649,9 @@ def _setup_devices(self) -> "torch.device":
         if is_torch_tpu_available():
             device = self.distributed_state.device
             self._n_gpu = 0
-        elif is_sagemaker_dp_enabled():
-            self._n_gpu = 1
+        elif is_sagemaker_dp_enabled() or is_sagemaker_mp_enabled():
+            # Already set _n_gpu
+            pass
         elif self.distributed_state.distributed_type == DistributedType.NO:
             if self.use_mps_device:
                 if not torch.backends.mps.is_available():
0 commit comments