File tree Expand file tree Collapse file tree 2 files changed +6
-2
lines changed
pytorch_lightning/callbacks Expand file tree Collapse file tree 2 files changed +6
-2
lines changed Original file line number Diff line number Diff line change @@ -301,7 +301,8 @@ def __validate_init_configuration(self):
301301 )
302302 if self .every_n_train_steps > 0 and self .every_n_val_epochs > 0 :
303303 raise MisconfigurationException (
304- f'Invalid values for every_n_train_steps={ self .every_n_train_steps } and every_n_val_epochs={ self .every_n_val_epochs } .'
304+ f'Invalid values for every_n_train_steps={ self .every_n_train_steps } '
305+ ' and every_n_val_epochs={self.every_n_val_epochs}.'
305306 'Both cannot be enabled at the same time.'
306307 )
307308 if self .monitor is None :
Original file line number Diff line number Diff line change @@ -545,7 +545,10 @@ def test_invalid_every_n_train_steps(tmpdir):
545545
546546
547547def test_invalid_every_n_train_steps_val_epochs_combination (tmpdir ):
548- """ Make sure that a MisconfigurationException is raised if both every_n_val_epochs and every_n_train_steps are enabled together. """
548+ """
549+ Test that a MisconfigurationException is raised if both
550+ every_n_val_epochs and every_n_train_steps are enabled together.
551+ """
549552 with pytest .raises (MisconfigurationException , match = r'.*Both cannot be enabled at the same time' ):
550553 ModelCheckpoint (dirpath = tmpdir , every_n_train_steps = 1 , every_n_val_epochs = 2 )
551554 # These should not fail
You can’t perform that action at this time.
0 commit comments