diff --git a/pytorch_lightning/callbacks/model_checkpoint.py b/pytorch_lightning/callbacks/model_checkpoint.py
index 0d8d4cec81518..d257e1ea7cc0d 100644
--- a/pytorch_lightning/callbacks/model_checkpoint.py
+++ b/pytorch_lightning/callbacks/model_checkpoint.py
@@ -246,9 +246,9 @@ def __validate_init_configuration(self):
                 ' configuration. No quantity for top_k to track.'
             )
         if self.save_last:
-            raise MisconfigurationException(
-                'ModelCheckpoint(save_last=True, monitor=None) is not a valid configuration.'
-                ' You can save the last checkpoint with ModelCheckpoint(save_top_k=None, monitor=None)'
+            rank_zero_warn(
+                'ModelCheckpoint(save_last=True, monitor=None) is a redundant configuration.'
+                ' You can save the last checkpoint with ModelCheckpoint(save_top_k=None, monitor=None).'
             )
 
     def __init_ckpt_dir(self, filepath, dirpath, filename, save_top_k):
diff --git a/tests/checkpointing/test_model_checkpoint.py b/tests/checkpointing/test_model_checkpoint.py
index 0e5eb0997b57d..d9e99d463b57d 100644
--- a/tests/checkpointing/test_model_checkpoint.py
+++ b/tests/checkpointing/test_model_checkpoint.py
@@ -294,8 +294,8 @@ def test_none_monitor_top_k(tmpdir):
 
 def test_none_monitor_save_last(tmpdir):
     """ Test that a warning appears for save_last=True with monitor=None. """
-    with pytest.raises(
-        MisconfigurationException, match=r'ModelCheckpoint\(save_last=True, monitor=None\) is not a valid.*'
+    with pytest.warns(
+        UserWarning, match=r'ModelCheckpoint\(save_last=True, monitor=None\) is a redundant.*'
     ):
         ModelCheckpoint(dirpath=tmpdir, save_last=True)
     # These should not fail