diff --git a/pytorch_lightning/trainer/connectors/callback_connector.py b/pytorch_lightning/trainer/connectors/callback_connector.py
index 0e5ba3170064a..f6a8a252b7c5b 100644
--- a/pytorch_lightning/trainer/connectors/callback_connector.py
+++ b/pytorch_lightning/trainer/connectors/callback_connector.py
@@ -150,15 +150,6 @@ def _configure_checkpoint_callbacks(self, checkpoint_callback: Optional[bool], e
         # if both are set then checkpoint only if both are True
         enable_checkpointing = checkpoint_callback and enable_checkpointing
 
-        # TODO: Remove this error in v1.5 so we rely purely on the type signature
-        if not isinstance(enable_checkpointing, bool):
-            error_msg = (
-                "Invalid type provided for `enable_checkpointing`: "
-                f"Expected bool but received {type(enable_checkpointing)}."
-            )
-            if isinstance(enable_checkpointing, Callback):
-                error_msg += " Pass callback instances to the `callbacks` argument in the Trainer constructor instead."
-            raise MisconfigurationException(error_msg)
         if self._trainer_has_checkpoint_callbacks() and enable_checkpointing is False:
             raise MisconfigurationException(
                 "Trainer was configured with `enable_checkpointing=False`"
diff --git a/tests/checkpointing/test_model_checkpoint.py b/tests/checkpointing/test_model_checkpoint.py
index 891c7c045ceaa..5513339409ab8 100644
--- a/tests/checkpointing/test_model_checkpoint.py
+++ b/tests/checkpointing/test_model_checkpoint.py
@@ -1187,12 +1187,6 @@ def test_model_checkpoint_mode_options():
         ModelCheckpoint(mode="unknown_option")
 
 
-def test_trainer_checkpoint_callback_bool(tmpdir):
-    mc = ModelCheckpoint(dirpath=tmpdir)
-    with pytest.raises(MisconfigurationException, match="Invalid type provided for `enable_checkpointing`"):
-        Trainer(enable_checkpointing=mc)
-
-
 def test_check_val_every_n_epochs_top_k_integration(tmpdir):
     model = BoringModel()
     mc = ModelCheckpoint(dirpath=tmpdir, monitor="epoch", save_top_k=-1, filename="{epoch}")