Fix test for sagemaker and TPU integrations

Sylvain Gugger 2021-02-04 15:06:58 -05:00
parent 21b3922e35
commit 4739ce177d


@@ -793,7 +793,7 @@ class Trainer:
             model = ShardedDDP(model, self.optimizer)
         elif is_sagemaker_distributed_available():
             model = DDP(model, device_ids=[dist.get_local_rank()], broadcast_buffers=False)
-        if self.deepspeed:
+        elif self.deepspeed:
             pass # already initialized its own DDP earlier
         elif self.args.local_rank != -1:
             if self.args.ddp_find_unused_parameters is not None:
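
The change folds the DeepSpeed check into the same if/elif chain that handles Sharded DDP and SageMaker distributed training. With the previous standalone `if self.deepspeed:`, the `elif self.args.local_rank != -1:` branch below it formed a second, independent chain, so a model already wrapped for SageMaker could fall through and be wrapped a second time in torch DDP. The sketch below is a minimal illustration of the corrected control flow, not the Trainer code itself; the `wrap_model` helper and the string-tuple "wrappers" are hypothetical stand-ins for the real `ShardedDDP`, smdistributed DDP, and torch `DistributedDataParallel` classes.

```python
# Minimal sketch of the corrected wrapping logic, using placeholder "wrappers"
# (string tuples) instead of the actual distributed classes used in Trainer.

def wrap_model(model, *, sharded_ddp=False, sagemaker_dist=False,
               deepspeed=False, local_rank=-1):
    """Wrap the model with at most one distributed strategy.

    Keeping every branch in a single if/elif chain is the point of the fix:
    a standalone `if deepspeed:` would start a second chain, letting a model
    already wrapped for SageMaker fall through to the local_rank branch too.
    """
    if sharded_ddp:
        model = ("ShardedDDP", model)    # placeholder for ShardedDDP(model, optimizer)
    elif sagemaker_dist:
        model = ("SageMakerDDP", model)  # placeholder for smdistributed DDP
    elif deepspeed:
        pass                             # DeepSpeed already set up its own DDP earlier
    elif local_rank != -1:
        model = ("TorchDDP", model)      # placeholder for torch DistributedDataParallel
    return model


if __name__ == "__main__":
    # With the old standalone `if`, this case could be wrapped twice:
    # SageMaker DDP first, then torch DDP via the local_rank branch.
    print(wrap_model("model", sagemaker_dist=True, local_rank=0))
    # -> ('SageMakerDDP', 'model')
```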