mirror of
https://github.com/huggingface/transformers.git
synced 2025-07-03 12:50:06 +06:00
[trainer] round numbers in trainer state (#9491)
* round numbers
* style
* round only on logging
This commit is contained in:
parent
01a1684078
commit
e6f211cade
@@ -964,7 +964,7 @@ class Trainer:
|
||||
# reset tr_loss to zero
|
||||
tr_loss -= tr_loss
|
||||
|
||||
logs["loss"] = tr_loss_scalar / (self.state.global_step - self._globalstep_last_logged)
|
||||
logs["loss"] = round(tr_loss_scalar / (self.state.global_step - self._globalstep_last_logged), 4)
|
||||
# backward compatibility for pytorch schedulers
|
||||
logs["learning_rate"] = (
|
||||
self.lr_scheduler.get_last_lr()[0]
|
||||
@@ -1168,7 +1168,7 @@ class Trainer:
|
||||
The values to log.
|
||||
"""
|
||||
if self.state.epoch is not None:
|
||||
logs["epoch"] = self.state.epoch
|
||||
logs["epoch"] = round(self.state.epoch, 2)
|
||||
|
||||
self.control = self.callback_handler.on_log(self.args, self.state, self.control, logs)
|
||||
output = {**logs, **{"step": self.state.global_step}}
|
||||
|
Loading…
Reference in New Issue
Block a user