Mirror of https://github.com/huggingface/transformers.git
Fixes issue when saving FSDP via Accelerate's FSDP plugin (#24446)
parent: 2898fd3968
commit: a6f37f8879
@@ -2322,7 +2322,7 @@ class Trainer:
                 torch.save(self.scaler.state_dict(), os.path.join(output_dir, SCALER_NAME))
         elif self.args.should_save and not self.is_deepspeed_enabled:
             # deepspeed.save_checkpoint above saves model/optim/sched
-            if self.fsdp:
+            if self.fsdp and not self.is_fsdp_enabled:
                 torch.save(full_osd, os.path.join(output_dir, OPTIMIZER_NAME))
             else:
                 torch.save(self.optimizer.state_dict(), os.path.join(output_dir, OPTIMIZER_NAME))
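For context, a minimal sketch of why the extra `not self.is_fsdp_enabled` guard matters: `full_osd` is only gathered on the legacy Trainer-managed FSDP path, so when FSDP runs through Accelerate's FSDP plugin the save must fall through to the plain optimizer state dict instead. The function name, flags, and file name below are hypothetical illustrations, not Trainer's actual internals:

import os

import torch
from torch.distributed.fsdp import FullyShardedDataParallel as FSDP

# Hypothetical stand-in for the OPTIMIZER_NAME constant used in the diff.
OPTIMIZER_NAME = "optimizer.pt"

def save_optimizer_state(model, optimizer, output_dir, legacy_fsdp, accelerate_fsdp):
    if legacy_fsdp and not accelerate_fsdp:
        # Legacy Trainer-managed FSDP: gather the sharded optimizer state into
        # one full state dict (called on all ranks; rank 0 receives the result).
        full_osd = FSDP.full_optim_state_dict(model, optimizer)
        torch.save(full_osd, os.path.join(output_dir, OPTIMIZER_NAME))
    else:
        # FSDP driven by Accelerate's plugin (or no FSDP at all): no full_osd
        # was ever gathered, so save the optimizer's own state dict instead.
        torch.save(optimizer.state_dict(), os.path.join(output_dir, OPTIMIZER_NAME))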