diff --git a/CHANGELOG.md b/CHANGELOG.md
index 97d2fa55fa4ce..62baa2bc6fef3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -98,6 +98,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Expose DeepSpeed loss parameters to allow users to fix loss instability ([#6115](https://github.com/PyTorchLightning/pytorch-lightning/pull/6115))
 
 
+- Fixed `AttributeError` when `logger=None` on TPU ([#6221](https://github.com/PyTorchLightning/pytorch-lightning/pull/6221))
+
+
 - Fixed `ModelPruning(make_pruning_permanent=True)` pruning buffers getting removed when saved during training ([#6073](https://github.com/PyTorchLightning/pytorch-lightning/pull/6073))
 
 
diff --git a/pytorch_lightning/plugins/training_type/tpu_spawn.py b/pytorch_lightning/plugins/training_type/tpu_spawn.py
index 0bea6ed56c5ab..e05a7bc03ef5c 100644
--- a/pytorch_lightning/plugins/training_type/tpu_spawn.py
+++ b/pytorch_lightning/plugins/training_type/tpu_spawn.py
@@ -262,7 +262,7 @@ def __load_weights_on_main_process(self) -> None:
         self._model = model
 
     def _close_logger(self, trainer) -> None:
-        if hasattr(trainer, "logger"):
+        if trainer.logger is not None:
             trainer.logger.finalize("success")
 
     @property
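
A minimal sketch (not part of the patch) of why the old guard fails: the trainer object always defines a `logger` attribute, so `hasattr(trainer, "logger")` is true even when the user disabled logging and the attribute is `None`, and the subsequent `finalize("success")` call then raises `AttributeError`. The classes below are simplified stand-ins, not the real Lightning `Trainer` or TPU spawn plugin.

```python
class FakeTrainer:
    """Simplified stand-in: the attribute exists even when no logger is set."""

    def __init__(self, logger=None):
        self.logger = logger


def close_logger_old(trainer):
    # Old guard: hasattr() is True even when trainer.logger is None,
    # so finalize() is called on None and raises AttributeError.
    if hasattr(trainer, "logger"):
        trainer.logger.finalize("success")


def close_logger_new(trainer):
    # Patched guard: only finalize when a logger is actually configured.
    if trainer.logger is not None:
        trainer.logger.finalize("success")


trainer = FakeTrainer(logger=None)

close_logger_new(trainer)  # no-op, no error

try:
    close_logger_old(trainer)
except AttributeError as exc:
    print(f"old guard fails: {exc}")  # 'NoneType' object has no attribute 'finalize'
```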