1 file changed, 2 insertions(+), 6 deletions(-)

@@ -2427,7 +2427,7 @@ def _inner_training_loop(
         self.state = TrainerState(
             stateful_callbacks=[
                 cb for cb in self.callback_handler.callbacks + [self.control] if isinstance(cb, ExportableState)
-            ],
+            ]
         )
         self.state.is_hyper_param_search = trial is not None
         self.state.train_batch_size = self._train_batch_size
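
The only change in this hunk is dropping the trailing comma after the list argument; behavior is identical. For context, a minimal, self-contained sketch of the isinstance-based filtering pattern being configured here, using hypothetical stand-in classes rather than the real transformers types:

    # Sketch of the filter used above. ExportableState and the callback
    # classes below are stand-ins, not the real transformers classes.

    class ExportableState:
        """Marker base: callbacks whose state can be checkpointed."""
        def state(self):
            return {}

    class PrinterCallback:  # not exportable, gets filtered out
        pass

    class StoppingCallback(ExportableState):  # exportable, kept
        def state(self):
            return {"patience": 3}

    callbacks = [PrinterCallback(), StoppingCallback()]
    control = StoppingCallback()

    # Same comprehension shape as in the diff: keep only callbacks whose
    # state can be serialized alongside the trainer state.
    stateful = [cb for cb in callbacks + [control] if isinstance(cb, ExportableState)]
    print([type(cb).__name__ for cb in stateful])  # ['StoppingCallback', 'StoppingCallback']
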
@@ -3198,10 +3198,7 @@ def _maybe_log_save_evaluate(
             # reset tr_loss to zero
             tr_loss -= tr_loss

-            logs["loss"] = round(
-                tr_loss_scalar / (self.state.global_step - self._globalstep_last_logged),
-                4,
-            )
+            logs["loss"] = round(tr_loss_scalar / (self.state.global_step - self._globalstep_last_logged), 4)
             if grad_norm is not None:
                 logs["grad_norm"] = grad_norm.item() if isinstance(grad_norm, torch.Tensor) else grad_norm
             if learning_rate is not None:
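
The collapsed line reports the mean training loss accumulated since the last logging step, rounded to four decimals. A rough sketch of that bookkeeping, with names mirroring the diff but simplified rather than reproducing the real Trainer internals:

    # Hypothetical sketch of the loss-logging bookkeeping shown above.

    class LossLogger:
        def __init__(self):
            self.global_step = 0
            self._globalstep_last_logged = 0
            self.tr_loss = 0.0  # running sum of per-step losses since last log

        def step(self, loss: float):
            self.tr_loss += loss
            self.global_step += 1

        def log(self) -> dict:
            steps = self.global_step - self._globalstep_last_logged
            logs = {"loss": round(self.tr_loss / steps, 4)}  # same rounding as the diff
            self.tr_loss = 0.0  # mirrors `tr_loss -= tr_loss`
            self._globalstep_last_logged = self.global_step
            return logs

    logger = LossLogger()
    for loss in (0.91, 0.87, 0.84):
        logger.step(loss)
    print(logger.log())  # {'loss': 0.8733}
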
@@ -5495,7 +5492,6 @@ def create_accelerator_and_postprocess(self):
         self.is_deepspeed_enabled = getattr(self.accelerator.state, "deepspeed_plugin", None) is not None
         self.is_fsdp_enabled = getattr(self.accelerator.state, "fsdp_plugin", None) is not None
         self.is_tp_enabled = getattr(self.accelerator.state, "torch_tp_plugin", None) is not None
-
         # post accelerator creation setup
         if self.is_fsdp_enabled:
             fsdp_plugin = self.accelerator.state.fsdp_plugin
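
This hunk only removes a blank line, but the surrounding idiom is worth noting: each backend flag is derived by probing the accelerator state for an optional plugin attribute. A generic sketch of that getattr-probe pattern, using a made-up state object rather than the real accelerate API:

    # Generic sketch of the getattr-probe idiom used above.
    # `state` here is a made-up container, not accelerate's AcceleratorState.

    from types import SimpleNamespace

    state = SimpleNamespace(fsdp_plugin=object())  # only FSDP configured in this example

    # Probe for optional plugin attributes; absent or None means the backend is off.
    is_deepspeed_enabled = getattr(state, "deepspeed_plugin", None) is not None
    is_fsdp_enabled = getattr(state, "fsdp_plugin", None) is not None
    is_tp_enabled = getattr(state, "torch_tp_plugin", None) is not None

    print(is_deepspeed_enabled, is_fsdp_enabled, is_tp_enabled)  # False True False
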