Skip to content

Commit

Permalink
Fixes an issue when saving FSDP checkpoints via Accelerate's FSDP plugin (#24446)
Browse files — browse the repository at this point in the history
  • Loading branch information
pacman100 authored Jun 23, 2023
1 parent 2898fd3 commit a6f37f8
Showing 1 changed file with 1 addition and 1 deletion.
2 changes: 1 addition & 1 deletion src/transformers/trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -2322,7 +2322,7 @@ def _save_checkpoint(self, model, trial, metrics=None):
torch.save(self.scaler.state_dict(), os.path.join(output_dir, SCALER_NAME))
elif self.args.should_save and not self.is_deepspeed_enabled:
# deepspeed.save_checkpoint above saves model/optim/sched
if self.fsdp:
if self.fsdp and not self.is_fsdp_enabled:
torch.save(full_osd, os.path.join(output_dir, OPTIMIZER_NAME))
else:
torch.save(self.optimizer.state_dict(), os.path.join(output_dir, OPTIMIZER_NAME))
Expand Down

0 comments on commit a6f37f8

Please sign in to comment.