
Commit 25a308e (1 parent: 3d16def)

Chore: revert some unwanted formatting changes

File tree: 1 file changed (+2, -6 lines)

src/transformers/trainer.py (2 additions, 6 deletions)
@@ -2427,7 +2427,7 @@ def _inner_training_loop(
         self.state = TrainerState(
             stateful_callbacks=[
                 cb for cb in self.callback_handler.callbacks + [self.control] if isinstance(cb, ExportableState)
-            ],
+            ]
         )
         self.state.is_hyper_param_search = trial is not None
         self.state.train_batch_size = self._train_batch_size
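For context: the comprehension in this hunk builds TrainerState's stateful_callbacks from only those callbacks that implement the ExportableState interface, so their state can be checkpointed alongside the trainer. A minimal, self-contained sketch of the same isinstance-filter pattern (the class bodies below are hypothetical stand-ins, not the real Transformers implementations):

class ExportableState:
    # Marker interface: anything exportable can serialize its state.
    def state(self) -> dict:
        raise NotImplementedError

class ProgressCallback:
    # Has no state worth persisting, so it is filtered out.
    pass

class EarlyStoppingCallback(ExportableState):
    def __init__(self) -> None:
        self.patience_counter = 3

    def state(self) -> dict:
        return {"patience_counter": self.patience_counter}

callbacks = [ProgressCallback(), EarlyStoppingCallback()]
stateful = [cb for cb in callbacks if isinstance(cb, ExportableState)]
print([type(cb).__name__ for cb in stateful])  # ['EarlyStoppingCallback']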
@@ -3198,10 +3198,7 @@ def _maybe_log_save_evaluate(
             # reset tr_loss to zero
             tr_loss -= tr_loss

-            logs["loss"] = round(
-                tr_loss_scalar / (self.state.global_step - self._globalstep_last_logged),
-                4,
-            )
+            logs["loss"] = round(tr_loss_scalar / (self.state.global_step - self._globalstep_last_logged), 4)
             if grad_norm is not None:
                 logs["grad_norm"] = grad_norm.item() if isinstance(grad_norm, torch.Tensor) else grad_norm
             if learning_rate is not None:
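Two details of this hunk are easy to miss. The logged loss is the running sum divided by the number of steps since the last logging event, rounded to four decimal places; and the earlier `tr_loss -= tr_loss` zeroes the accumulator in place rather than rebinding the name, so existing references to the tensor stay valid. A small runnable sketch, with hypothetical values standing in for the Trainer's bookkeeping fields:

import torch

global_step = 120                 # hypothetical current step
globalstep_last_logged = 100      # hypothetical step at the last log
tr_loss = torch.tensor(47.5)      # loss accumulated over those 20 steps

tr_loss_scalar = tr_loss.item()
# Mean loss per step since the last log, rounded to 4 places.
loss_to_log = round(tr_loss_scalar / (global_step - globalstep_last_logged), 4)
print(loss_to_log)  # 2.375

# In-place reset: the tensor object is reused, so any other reference
# to tr_loss now sees the zeroed accumulator.
tr_loss -= tr_loss
print(tr_loss.item())  # 0.0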
@@ -5495,7 +5492,6 @@ def create_accelerator_and_postprocess(self):
         self.is_deepspeed_enabled = getattr(self.accelerator.state, "deepspeed_plugin", None) is not None
         self.is_fsdp_enabled = getattr(self.accelerator.state, "fsdp_plugin", None) is not None
         self.is_tp_enabled = getattr(self.accelerator.state, "torch_tp_plugin", None) is not None
-
         # post accelerator creation setup
         if self.is_fsdp_enabled:
             fsdp_plugin = self.accelerator.state.fsdp_plugin
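The three flags above share one feature-detection idiom: getattr with a None default, so an accelerator state that never had the plugin attribute set simply reads as "not enabled" instead of raising AttributeError. A minimal sketch of the idiom using a stand-in object (SimpleNamespace here is illustrative, not the real accelerate API):

from types import SimpleNamespace

# Hypothetical accelerator state: only an FSDP plugin is attached.
state = SimpleNamespace(fsdp_plugin=object())

# getattr(obj, name, None) tolerates a missing attribute;
# `is not None` turns the lookup into a boolean capability flag.
is_deepspeed_enabled = getattr(state, "deepspeed_plugin", None) is not None
is_fsdp_enabled = getattr(state, "fsdp_plugin", None) is not None

print(is_deepspeed_enabled, is_fsdp_enabled)  # False True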
