We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 47e795b · commit 05a8b5e — Copy full SHA for 05a8b5e
src/transformers/trainer.py
@@ -3004,7 +3004,7 @@ def _maybe_log_save_evaluate(
3004
# reset tr_loss to zero
3005
tr_loss -= tr_loss
3006
3007
- logs["loss"] = round(tr_loss_scalar / (self.state.global_step - self._globalstep_last_logged), 4)
+ logs["loss"] = tr_loss_scalar / (self.state.global_step - self._globalstep_last_logged)
3008
if grad_norm is not None:
3009
logs["grad_norm"] = grad_norm.item() if isinstance(grad_norm, torch.Tensor) else grad_norm
3010
if learning_rate is not None:
0 commit comments