diff --git a/src/transformers/trainer.py b/src/transformers/trainer.py index 56b621c8a7f..350af041866 100755 --- a/src/transformers/trainer.py +++ b/src/transformers/trainer.py @@ -1919,8 +1919,8 @@ class Trainer: run_dir = self._get_output_dir(trial) checkpoints_sorted = self._sorted_checkpoints(use_mtime=False, output_dir=run_dir) - # Delete the last checkpoint when save_total_limit=1 if it's different from the best checkpoint. - if self.state.best_model_checkpoint is not None and self.args.save_total_limit == 1: + # Delete the last checkpoint when save_total_limit=1 if it's different from the best checkpoint and the process is allowed to save. + if self.args.should_save and self.state.best_model_checkpoint is not None and self.args.save_total_limit == 1: for checkpoint in checkpoints_sorted: if checkpoint != self.state.best_model_checkpoint: logger.info(f"Deleting older checkpoint [{checkpoint}] due to args.save_total_limit")