[trainer] round numbers in trainer state (#9491)

* round numbers

* style

* round only on logging
Stas Bekman 2021-01-11 10:17:49 -08:00 committed by GitHub
parent 01a1684078
commit e6f211cade

@@ -964,7 +964,7 @@ class Trainer:
# reset tr_loss to zero
tr_loss -= tr_loss
-logs["loss"] = tr_loss_scalar / (self.state.global_step - self._globalstep_last_logged)
+logs["loss"] = round(tr_loss_scalar / (self.state.global_step - self._globalstep_last_logged), 4)
# backward compatibility for pytorch schedulers
logs["learning_rate"] = (
self.lr_scheduler.get_last_lr()[0]
@@ -1168,7 +1168,7 @@ class Trainer:
The values to log.
"""
if self.state.epoch is not None:
-logs["epoch"] = self.state.epoch
+logs["epoch"] = round(self.state.epoch, 2)
self.control = self.callback_handler.on_log(self.args, self.state, self.control, logs)
output = {**logs, **{"step": self.state.global_step}}
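
Below is a minimal sketch (not the actual Trainer code) of what this change does: rounding is applied only when values are placed into the logs dict, so the running tr_loss accumulator and self.state.epoch keep full precision internally while the logged output stays compact. The variable values here are made up for illustration.

# Minimal sketch of logging-time rounding; names mirror the attributes
# touched in the diff, values are illustrative.
tr_loss_scalar = 1.2345678901          # loss accumulated since the last log
global_step = 150
globalstep_last_logged = 100
epoch = 0.3333333333333333

logs = {}
# Average the loss over the steps since the last log and round to 4 decimals
# only in the log entry; the running accumulator is left untouched.
logs["loss"] = round(tr_loss_scalar / (global_step - globalstep_last_logged), 4)
# Round the epoch to 2 decimals for display.
logs["epoch"] = round(epoch, 2)

print({**logs, "step": global_step})
# {'loss': 0.0247, 'epoch': 0.33, 'step': 150}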