diff --git a/pytorch_lightning/callbacks/lr_logger.py b/pytorch_lightning/callbacks/lr_logger.py
index f3cd98fe67b610..b24e8fdd4ee40d 100755
--- a/pytorch_lightning/callbacks/lr_logger.py
+++ b/pytorch_lightning/callbacks/lr_logger.py
@@ -53,7 +53,7 @@ def on_train_start(self, trainer, pl_module):
         if not trainer.logger:
             raise MisconfigurationException(
-                'Cannot use LearningRateLogger callback with Trainer that have no logger.')
+                'Cannot use LearningRateLogger callback with Trainer that has no logger.')
         # Create uniqe names in the case we have multiple of the same learning
         # rate schduler + multiple parameter groups
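
For context (not part of this PR), a minimal usage sketch of the callback whose error message is being reworded. It assumes a PyTorch Lightning release that still ships LearningRateLogger, and MyModel is a hypothetical LightningModule used only for illustration:

# Minimal sketch (assumption: a PL version that ships LearningRateLogger).
import pytorch_lightning as pl
from pytorch_lightning.callbacks import LearningRateLogger
from pytorch_lightning.loggers import TensorBoardLogger

# LearningRateLogger writes the current learning rate(s) to the Trainer's
# logger; constructing the Trainer without `logger=...` triggers the
# MisconfigurationException whose wording this diff fixes.
trainer = pl.Trainer(
    logger=TensorBoardLogger(save_dir="lightning_logs"),
    callbacks=[LearningRateLogger()],
    max_epochs=1,
)
trainer.fit(MyModel())  # MyModel: hypothetical LightningModule, not defined here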