diff --git a/src/transformers/trainer.py b/src/transformers/trainer.py
index 76e29a1ad51..b4abb8d333f 100755
--- a/src/transformers/trainer.py
+++ b/src/transformers/trainer.py
@@ -964,7 +964,7 @@ class Trainer:
             # reset tr_loss to zero
             tr_loss -= tr_loss
 
-            logs["loss"] = tr_loss_scalar / (self.state.global_step - self._globalstep_last_logged)
+            logs["loss"] = round(tr_loss_scalar / (self.state.global_step - self._globalstep_last_logged), 4)
             # backward compatibility for pytorch schedulers
             logs["learning_rate"] = (
                 self.lr_scheduler.get_last_lr()[0]
@@ -1168,7 +1168,7 @@ class Trainer:
                 The values to log.
         """
         if self.state.epoch is not None:
-            logs["epoch"] = self.state.epoch
+            logs["epoch"] = round(self.state.epoch, 2)
 
         self.control = self.callback_handler.on_log(self.args, self.state, self.control, logs)
         output = {**logs, **{"step": self.state.global_step}}
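
For reference, a minimal standalone sketch of the effect of this change. It is not Trainer code; the variable names mirror the diff, and all numeric values are made up for illustration:

    # Hypothetical values standing in for the Trainer's internal state.
    tr_loss_scalar = 12.3456789       # loss accumulated since the last log
    global_step = 150                 # current global step
    globalstep_last_logged = 100      # global step at the previous log

    # Before this change: the raw float is logged with full precision.
    loss = tr_loss_scalar / (global_step - globalstep_last_logged)
    print(loss)             # 0.246913578

    # After this change: the value is rounded to 4 decimal places first.
    print(round(loss, 4))   # 0.2469

    # The epoch gets the same treatment, rounded to 2 decimal places.
    epoch = 1.3333333333    # made-up fractional epoch
    print(round(epoch, 2))  # 1.33

The rounding only affects what is handed to the log callbacks and printed; the underlying `tr_loss` tensor and `self.state.epoch` are left untouched, so training itself is unchanged and log lines simply become shorter and easier to read.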