From a83a6007663ac87b3de508c821defd3790311e85 Mon Sep 17 00:00:00 2001 From: rohitgr7 Date: Fri, 21 Aug 2020 02:52:08 +0530 Subject: [PATCH 1/7] Change LearningRateLogger to LearningRateMonitor --- pytorch_lightning/callbacks/__init__.py | 4 +- pytorch_lightning/callbacks/lr_logger.py | 26 ++++----- tests/callbacks/test_lr_logger.py | 68 +++++++++++++++--------- 3 files changed, 59 insertions(+), 39 deletions(-) diff --git a/pytorch_lightning/callbacks/__init__.py b/pytorch_lightning/callbacks/__init__.py index eab698d06dfc8..ee2313109af88 100644 --- a/pytorch_lightning/callbacks/__init__.py +++ b/pytorch_lightning/callbacks/__init__.py @@ -1,7 +1,7 @@ from pytorch_lightning.callbacks.base import Callback from pytorch_lightning.callbacks.early_stopping import EarlyStopping from pytorch_lightning.callbacks.gradient_accumulation_scheduler import GradientAccumulationScheduler -from pytorch_lightning.callbacks.lr_logger import LearningRateLogger +from pytorch_lightning.callbacks.lr_logger import LearningRateMonitor from pytorch_lightning.callbacks.model_checkpoint import ModelCheckpoint from pytorch_lightning.callbacks.progress import ProgressBarBase, ProgressBar from pytorch_lightning.callbacks.gpu_stats_monitor import GPUStatsMonitor @@ -11,7 +11,7 @@ 'EarlyStopping', 'ModelCheckpoint', 'GradientAccumulationScheduler', - 'LearningRateLogger', + 'LearningRateMonitor', 'ProgressBarBase', 'ProgressBar', 'GPUStatsMonitor' diff --git a/pytorch_lightning/callbacks/lr_logger.py b/pytorch_lightning/callbacks/lr_logger.py index 4209f8e8b6214..73306c3e3665b 100755 --- a/pytorch_lightning/callbacks/lr_logger.py +++ b/pytorch_lightning/callbacks/lr_logger.py @@ -14,10 +14,10 @@ r""" -Learning Rate Logger +Learning Rate Monitor ==================== -Log learning rate for lr schedulers during training +Monitor and logs learning rate for lr schedulers during training. """ @@ -28,9 +28,9 @@ from pytorch_lightning.utilities.exceptions import MisconfigurationException -class LearningRateLogger(Callback): +class LearningRateMonitor(Callback): r""" - Automatically logs learning rate for learning rate schedulers during training. + Automatically monitor and logs learning rate for learning rate schedulers during training. Args: logging_interval: set to `epoch` or `step` to log `lr` of all optimizers @@ -40,9 +40,9 @@ class LearningRateLogger(Callback): Example:: >>> from pytorch_lightning import Trainer - >>> from pytorch_lightning.callbacks import LearningRateLogger - >>> lr_logger = LearningRateLogger(logging_interval='step') - >>> trainer = Trainer(callbacks=[lr_logger]) + >>> from pytorch_lightning.callbacks import LearningRateMonitor + >>> lr_monitor = LearningRateMonitor(logging_interval='step') + >>> trainer = Trainer(callbacks=[lr_monitor]) Logging names are automatically determined based on optimizer class name. In case of multiple optimizers of same type, they will be named `Adam`, @@ -57,6 +57,7 @@ def configure_optimizer(self): lr_scheduler = {'scheduler': torch.optim.lr_schedulers.LambdaLR(optimizer, ...) 
'name': 'my_logging_name'} return [optimizer], [lr_scheduler] + """ def __init__(self, logging_interval: Optional[str] = None): if logging_interval not in (None, 'step', 'epoch'): @@ -69,18 +70,19 @@ def __init__(self, logging_interval: Optional[str] = None): self.lr_sch_names = [] def on_train_start(self, trainer, pl_module): - """ Called before training, determines unique names for all lr - schedulers in the case of multiple of the same type or in - the case of multiple parameter groups + """ + Called before training, determines unique names for all lr + schedulers in the case of multiple of the same type or in + the case of multiple parameter groups """ if not trainer.logger: raise MisconfigurationException( - 'Cannot use LearningRateLogger callback with Trainer that has no logger.' + 'Cannot use LearningRateMonitor callback with Trainer that has no logger.' ) if not trainer.lr_schedulers: rank_zero_warn( - 'You are using LearningRateLogger callback with models that' + 'You are using LearningRateMonitor callback with models that' ' have no learning rate schedulers. Please see documentation' ' for `configure_optimizers` method.', RuntimeWarning ) diff --git a/tests/callbacks/test_lr_logger.py b/tests/callbacks/test_lr_logger.py index 264329fa3ffc3..4370150768504 100644 --- a/tests/callbacks/test_lr_logger.py +++ b/tests/callbacks/test_lr_logger.py @@ -1,78 +1,96 @@ import pytest -import tests.base.develop_utils as tutils from pytorch_lightning import Trainer -from pytorch_lightning.callbacks import LearningRateLogger +from pytorch_lightning.callbacks import LearningRateMonitor +from pytorch_lightning.utilities.exceptions import MisconfigurationException from tests.base import EvalModelTemplate +import tests.base.develop_utils as tutils -def test_lr_logger_single_lr(tmpdir): +def test_lr_monitor_single_lr(tmpdir): """ Test that learning rates are extracted and logged for single lr scheduler. 
""" tutils.reset_seed() model = EvalModelTemplate() model.configure_optimizers = model.configure_optimizers__single_scheduler - lr_logger = LearningRateLogger() + lr_monitor = LearningRateMonitor() trainer = Trainer( default_root_dir=tmpdir, max_epochs=2, limit_val_batches=0.1, limit_train_batches=0.5, - callbacks=[lr_logger], + callbacks=[lr_monitor], ) result = trainer.fit(model) assert result - assert lr_logger.lrs, 'No learning rates logged' - assert len(lr_logger.lrs) == len(trainer.lr_schedulers), \ + assert lr_monitor.lrs, 'No learning rates logged' + assert len(lr_monitor.lrs) == len(trainer.lr_schedulers), \ 'Number of learning rates logged does not match number of lr schedulers' - assert all([k in ['lr-Adam'] for k in lr_logger.lrs.keys()]), \ + assert all([k in ['lr-Adam'] for k in lr_monitor.lrs.keys()]), \ 'Names of learning rates not set correctly' -def test_lr_logger_no_lr(tmpdir): +def test_lr_monitor_no_lr_scheduler(tmpdir): tutils.reset_seed() model = EvalModelTemplate() - lr_logger = LearningRateLogger() + lr_monitor = LearningRateMonitor() trainer = Trainer( default_root_dir=tmpdir, max_epochs=2, limit_val_batches=0.1, limit_train_batches=0.5, - callbacks=[lr_logger], + callbacks=[lr_monitor], ) - with pytest.warns(RuntimeWarning): + with pytest.warns(RuntimeWarning, match='have no learning rate schedulers'): result = trainer.fit(model) assert result +def test_lr_monitor_no_logger(tmpdir): + tutils.reset_seed() + + model = EvalModelTemplate() + + lr_monitor = LearningRateMonitor() + trainer = Trainer( + default_root_dir=tmpdir, + max_epochs=1, + callbacks=[lr_monitor], + logger=False + ) + + with pytest.raises(MisconfigurationException, match='Trainer that has no logger'): + trainer.fit(model) + + @pytest.mark.parametrize("logging_interval", ['step', 'epoch']) -def test_lr_logger_multi_lrs(tmpdir, logging_interval): +def test_lr_monitor_multi_lrs(tmpdir, logging_interval): """ Test that learning rates are extracted and logged for multi lr schedulers. 
""" tutils.reset_seed() model = EvalModelTemplate() model.configure_optimizers = model.configure_optimizers__multiple_schedulers - lr_logger = LearningRateLogger(logging_interval=logging_interval) + lr_monitor = LearningRateMonitor(logging_interval=logging_interval) trainer = Trainer( default_root_dir=tmpdir, max_epochs=2, limit_val_batches=0.1, limit_train_batches=0.5, - callbacks=[lr_logger], + callbacks=[lr_monitor], ) result = trainer.fit(model) assert result - assert lr_logger.lrs, 'No learning rates logged' - assert len(lr_logger.lrs) == len(trainer.lr_schedulers), \ + assert lr_monitor.lrs, 'No learning rates logged' + assert len(lr_monitor.lrs) == len(trainer.lr_schedulers), \ 'Number of learning rates logged does not match number of lr schedulers' - assert all([k in ['lr-Adam', 'lr-Adam-1'] for k in lr_logger.lrs.keys()]), \ + assert all([k in ['lr-Adam', 'lr-Adam-1'] for k in lr_monitor.lrs.keys()]), \ 'Names of learning rates not set correctly' if logging_interval == 'step': @@ -80,30 +98,30 @@ def test_lr_logger_multi_lrs(tmpdir, logging_interval): if logging_interval == 'epoch': expected_number_logged = trainer.max_epochs - assert all(len(lr) == expected_number_logged for lr in lr_logger.lrs.values()), \ + assert all(len(lr) == expected_number_logged for lr in lr_monitor.lrs.values()), \ 'Length of logged learning rates do not match the expected number' -def test_lr_logger_param_groups(tmpdir): +def test_lr_monitor_param_groups(tmpdir): """ Test that learning rates are extracted and logged for single lr scheduler. """ tutils.reset_seed() model = EvalModelTemplate() model.configure_optimizers = model.configure_optimizers__param_groups - lr_logger = LearningRateLogger() + lr_monitor = LearningRateMonitor() trainer = Trainer( default_root_dir=tmpdir, max_epochs=2, limit_val_batches=0.1, limit_train_batches=0.5, - callbacks=[lr_logger], + callbacks=[lr_monitor], ) result = trainer.fit(model) assert result - assert lr_logger.lrs, 'No learning rates logged' - assert len(lr_logger.lrs) == 2 * len(trainer.lr_schedulers), \ + assert lr_monitor.lrs, 'No learning rates logged' + assert len(lr_monitor.lrs) == 2 * len(trainer.lr_schedulers), \ 'Number of learning rates logged does not match number of param groups' - assert all([k in ['lr-Adam/pg1', 'lr-Adam/pg2'] for k in lr_logger.lrs.keys()]), \ + assert all([k in ['lr-Adam/pg1', 'lr-Adam/pg2'] for k in lr_monitor.lrs.keys()]), \ 'Names of learning rates not set correctly' From 4ab9aa850db3c72cf369e21781b26b8e903d8cf0 Mon Sep 17 00:00:00 2001 From: rohitgr7 Date: Sat, 29 Aug 2020 02:16:53 +0530 Subject: [PATCH 2/7] file rename --- pytorch_lightning/callbacks/__init__.py | 12 ++++++------ .../callbacks/{lr_logger.py => lr_monitor.py} | 3 ++- .../{test_lr_logger.py => test_lr_monitor.py} | 0 3 files changed, 8 insertions(+), 7 deletions(-) rename pytorch_lightning/callbacks/{lr_logger.py => lr_monitor.py} (99%) rename tests/callbacks/{test_lr_logger.py => test_lr_monitor.py} (100%) diff --git a/pytorch_lightning/callbacks/__init__.py b/pytorch_lightning/callbacks/__init__.py index ee2313109af88..6da2e84f1d20b 100644 --- a/pytorch_lightning/callbacks/__init__.py +++ b/pytorch_lightning/callbacks/__init__.py @@ -1,18 +1,18 @@ from pytorch_lightning.callbacks.base import Callback from pytorch_lightning.callbacks.early_stopping import EarlyStopping +from pytorch_lightning.callbacks.gpu_stats_monitor import GPUStatsMonitor from pytorch_lightning.callbacks.gradient_accumulation_scheduler import GradientAccumulationScheduler -from 
pytorch_lightning.callbacks.lr_logger import LearningRateMonitor +from pytorch_lightning.callbacks.lr_monitor import LearningRateMonitor from pytorch_lightning.callbacks.model_checkpoint import ModelCheckpoint -from pytorch_lightning.callbacks.progress import ProgressBarBase, ProgressBar -from pytorch_lightning.callbacks.gpu_stats_monitor import GPUStatsMonitor +from pytorch_lightning.callbacks.progress import ProgressBar, ProgressBarBase __all__ = [ 'Callback', 'EarlyStopping', - 'ModelCheckpoint', + 'GPUStatsMonitor', 'GradientAccumulationScheduler', 'LearningRateMonitor', - 'ProgressBarBase', + 'ModelCheckpoint', 'ProgressBar', - 'GPUStatsMonitor' + 'ProgressBarBase', ] diff --git a/pytorch_lightning/callbacks/lr_logger.py b/pytorch_lightning/callbacks/lr_monitor.py similarity index 99% rename from pytorch_lightning/callbacks/lr_logger.py rename to pytorch_lightning/callbacks/lr_monitor.py index 73306c3e3665b..8c9c9af351fb6 100755 --- a/pytorch_lightning/callbacks/lr_logger.py +++ b/pytorch_lightning/callbacks/lr_monitor.py @@ -57,7 +57,7 @@ def configure_optimizer(self): lr_scheduler = {'scheduler': torch.optim.lr_schedulers.LambdaLR(optimizer, ...) 'name': 'my_logging_name'} return [optimizer], [lr_scheduler] - + """ def __init__(self, logging_interval: Optional[str] = None): if logging_interval not in (None, 'step', 'epoch'): @@ -137,6 +137,7 @@ def _find_names(self, lr_schedulers): else: opt_name = 'lr-' + sch.optimizer.__class__.__name__ i, name = 1, opt_name + # Multiple schduler of the same type while True: if name not in names: diff --git a/tests/callbacks/test_lr_logger.py b/tests/callbacks/test_lr_monitor.py similarity index 100% rename from tests/callbacks/test_lr_logger.py rename to tests/callbacks/test_lr_monitor.py From 84a3c5b7ce9bdc2841d8b86f9dc6d4afa85c9995 Mon Sep 17 00:00:00 2001 From: rohitgr7 Date: Sat, 29 Aug 2020 02:23:15 +0530 Subject: [PATCH 3/7] docs --- docs/source/callbacks.rst | 2 +- pytorch_lightning/callbacks/lr_monitor.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/callbacks.rst b/docs/source/callbacks.rst index c3a85887b851d..a73a70fb79198 100644 --- a/docs/source/callbacks.rst +++ b/docs/source/callbacks.rst @@ -120,7 +120,7 @@ Lightning has a few built-in callbacks. ---------------- -.. automodule:: pytorch_lightning.callbacks.lr_logger +.. automodule:: pytorch_lightning.callbacks.lr_monitor :noindex: :exclude-members: _extract_lr, diff --git a/pytorch_lightning/callbacks/lr_monitor.py b/pytorch_lightning/callbacks/lr_monitor.py index 8c9c9af351fb6..da87e6d5a548c 100755 --- a/pytorch_lightning/callbacks/lr_monitor.py +++ b/pytorch_lightning/callbacks/lr_monitor.py @@ -15,7 +15,7 @@ r""" Learning Rate Monitor -==================== +===================== Monitor and logs learning rate for lr schedulers during training. 
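
The docstring carried through the patches above shows how a scheduler gets a custom logging name via the 'name' key returned from configure_optimizers. The following is a minimal runnable sketch of that pattern, not part of the patch series: MyModel, the Linear layer, the learning rate and the decay lambda are illustrative assumptions, and it uses the standard torch.optim.lr_scheduler module (the docstring's torch.optim.lr_schedulers spelling appears to be a typo) with the comma the docstring example omits.

# Illustrative sketch (not part of the patches): a LightningModule whose
# scheduler carries a custom name, which LearningRateMonitor uses as the
# logging key instead of the auto-generated 'lr-<OptimizerName>'.
import torch
from pytorch_lightning import LightningModule, Trainer
from pytorch_lightning.callbacks import LearningRateMonitor


class MyModel(LightningModule):
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(32, 2)  # assumed model body

    def configure_optimizers(self):
        optimizer = torch.optim.Adam(self.parameters(), lr=1e-3)
        lr_scheduler = {
            'scheduler': torch.optim.lr_scheduler.LambdaLR(optimizer, lambda epoch: 0.95 ** epoch),
            'name': 'my_logging_name',  # the key LearningRateMonitor will log this scheduler under
        }
        return [optimizer], [lr_scheduler]


lr_monitor = LearningRateMonitor(logging_interval='step')
trainer = Trainer(callbacks=[lr_monitor])
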
From cc0c303b7b95a882f57cd406f825e59dd342c56a Mon Sep 17 00:00:00 2001 From: rohitgr7 Date: Sat, 29 Aug 2020 22:53:36 +0530 Subject: [PATCH 4/7] add LearningRateLogger with deprecation warning --- pytorch_lightning/callbacks/__init__.py | 3 ++- pytorch_lightning/callbacks/lr_monitor.py | 7 +++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/pytorch_lightning/callbacks/__init__.py b/pytorch_lightning/callbacks/__init__.py index 6da2e84f1d20b..074e98186eb74 100644 --- a/pytorch_lightning/callbacks/__init__.py +++ b/pytorch_lightning/callbacks/__init__.py @@ -2,7 +2,7 @@ from pytorch_lightning.callbacks.early_stopping import EarlyStopping from pytorch_lightning.callbacks.gpu_stats_monitor import GPUStatsMonitor from pytorch_lightning.callbacks.gradient_accumulation_scheduler import GradientAccumulationScheduler -from pytorch_lightning.callbacks.lr_monitor import LearningRateMonitor +from pytorch_lightning.callbacks.lr_monitor import LearningRateLogger, LearningRateMonitor from pytorch_lightning.callbacks.model_checkpoint import ModelCheckpoint from pytorch_lightning.callbacks.progress import ProgressBar, ProgressBarBase @@ -11,6 +11,7 @@ 'EarlyStopping', 'GPUStatsMonitor', 'GradientAccumulationScheduler', + 'LearningRateLogger', 'LearningRateMonitor', 'ModelCheckpoint', 'ProgressBar', diff --git a/pytorch_lightning/callbacks/lr_monitor.py b/pytorch_lightning/callbacks/lr_monitor.py index da87e6d5a548c..9d6df7308dadf 100755 --- a/pytorch_lightning/callbacks/lr_monitor.py +++ b/pytorch_lightning/callbacks/lr_monitor.py @@ -157,3 +157,10 @@ def _find_names(self, lr_schedulers): self.lr_sch_names.append(name) return names + + +class LearningRateLogger(LearningRateMonitor): + def __init__(self, *args, **kwargs): + rank_zero_warn("`LearningRateLogger` is now `LearningRateMonitor`" + " and this will be removed in v0.10.0", DeprecationWarning) + super().__init__(*args, **kwargs) From da91286349c5f267fb811a90e49f961ca162c7c0 Mon Sep 17 00:00:00 2001 From: rohitgr7 Date: Sun, 30 Aug 2020 03:57:22 +0530 Subject: [PATCH 5/7] deprecated LearningRateLogger --- pytorch_lightning/callbacks/__init__.py | 3 ++- pytorch_lightning/callbacks/lr_logger.py | 9 +++++++++ pytorch_lightning/callbacks/lr_monitor.py | 7 ------- tests/callbacks/test_lr_monitor.py | 10 +++++++++- 4 files changed, 20 insertions(+), 9 deletions(-) create mode 100644 pytorch_lightning/callbacks/lr_logger.py diff --git a/pytorch_lightning/callbacks/__init__.py b/pytorch_lightning/callbacks/__init__.py index 074e98186eb74..039f4077ef265 100644 --- a/pytorch_lightning/callbacks/__init__.py +++ b/pytorch_lightning/callbacks/__init__.py @@ -2,7 +2,8 @@ from pytorch_lightning.callbacks.early_stopping import EarlyStopping from pytorch_lightning.callbacks.gpu_stats_monitor import GPUStatsMonitor from pytorch_lightning.callbacks.gradient_accumulation_scheduler import GradientAccumulationScheduler -from pytorch_lightning.callbacks.lr_monitor import LearningRateLogger, LearningRateMonitor +from pytorch_lightning.callbacks.lr_logger import LearningRateLogger +from pytorch_lightning.callbacks.lr_monitor import LearningRateMonitor from pytorch_lightning.callbacks.model_checkpoint import ModelCheckpoint from pytorch_lightning.callbacks.progress import ProgressBar, ProgressBarBase diff --git a/pytorch_lightning/callbacks/lr_logger.py b/pytorch_lightning/callbacks/lr_logger.py new file mode 100644 index 0000000000000..70a5ca280e4ef --- /dev/null +++ b/pytorch_lightning/callbacks/lr_logger.py @@ -0,0 +1,9 @@ +from 
pytorch_lightning.callbacks.lr_monitor import LearningRateMonitor +from pytorch_lightning.utilities import rank_zero_warn + + +class LearningRateLogger(LearningRateMonitor): + def __init__(self, *args, **kwargs): + rank_zero_warn("`LearningRateLogger` is now `LearningRateMonitor`" + " and this will be removed in v0.10.0", DeprecationWarning) + super().__init__(*args, **kwargs) diff --git a/pytorch_lightning/callbacks/lr_monitor.py b/pytorch_lightning/callbacks/lr_monitor.py index 9d6df7308dadf..da87e6d5a548c 100755 --- a/pytorch_lightning/callbacks/lr_monitor.py +++ b/pytorch_lightning/callbacks/lr_monitor.py @@ -157,10 +157,3 @@ def _find_names(self, lr_schedulers): self.lr_sch_names.append(name) return names - - -class LearningRateLogger(LearningRateMonitor): - def __init__(self, *args, **kwargs): - rank_zero_warn("`LearningRateLogger` is now `LearningRateMonitor`" - " and this will be removed in v0.10.0", DeprecationWarning) - super().__init__(*args, **kwargs) diff --git a/tests/callbacks/test_lr_monitor.py b/tests/callbacks/test_lr_monitor.py index 4370150768504..299b5e0dc96c0 100644 --- a/tests/callbacks/test_lr_monitor.py +++ b/tests/callbacks/test_lr_monitor.py @@ -1,7 +1,7 @@ import pytest from pytorch_lightning import Trainer -from pytorch_lightning.callbacks import LearningRateMonitor +from pytorch_lightning.callbacks import LearningRateLogger, LearningRateMonitor from pytorch_lightning.utilities.exceptions import MisconfigurationException from tests.base import EvalModelTemplate import tests.base.develop_utils as tutils @@ -125,3 +125,11 @@ def test_lr_monitor_param_groups(tmpdir): 'Number of learning rates logged does not match number of param groups' assert all([k in ['lr-Adam/pg1', 'lr-Adam/pg2'] for k in lr_monitor.lrs.keys()]), \ 'Names of learning rates not set correctly' + + +def test_lr_logger_deprecated(tmpdir): + """ Test the deprecation warning for LearningRateLogger. 
""" + tutils.reset_seed() + + with pytest.warns(DeprecationWarning, match='is now `LearningRateMonitor`'): + lr_logger = LearningRateLogger() From ea9cd451faf1c0604cc874b9a409169579fe4bce Mon Sep 17 00:00:00 2001 From: rohitgr7 Date: Tue, 1 Sep 2020 22:56:59 +0530 Subject: [PATCH 6/7] move deprecation check --- pytorch_lightning/callbacks/lr_logger.py | 2 +- tests/callbacks/test_lr_monitor.py | 10 +--------- tests/test_deprecated.py | 6 ++++++ 3 files changed, 8 insertions(+), 10 deletions(-) diff --git a/pytorch_lightning/callbacks/lr_logger.py b/pytorch_lightning/callbacks/lr_logger.py index 70a5ca280e4ef..76ade47087743 100644 --- a/pytorch_lightning/callbacks/lr_logger.py +++ b/pytorch_lightning/callbacks/lr_logger.py @@ -5,5 +5,5 @@ class LearningRateLogger(LearningRateMonitor): def __init__(self, *args, **kwargs): rank_zero_warn("`LearningRateLogger` is now `LearningRateMonitor`" - " and this will be removed in v0.10.0", DeprecationWarning) + " and this will be removed in v0.11.0", DeprecationWarning) super().__init__(*args, **kwargs) diff --git a/tests/callbacks/test_lr_monitor.py b/tests/callbacks/test_lr_monitor.py index 299b5e0dc96c0..4370150768504 100644 --- a/tests/callbacks/test_lr_monitor.py +++ b/tests/callbacks/test_lr_monitor.py @@ -1,7 +1,7 @@ import pytest from pytorch_lightning import Trainer -from pytorch_lightning.callbacks import LearningRateLogger, LearningRateMonitor +from pytorch_lightning.callbacks import LearningRateMonitor from pytorch_lightning.utilities.exceptions import MisconfigurationException from tests.base import EvalModelTemplate import tests.base.develop_utils as tutils @@ -125,11 +125,3 @@ def test_lr_monitor_param_groups(tmpdir): 'Number of learning rates logged does not match number of param groups' assert all([k in ['lr-Adam/pg1', 'lr-Adam/pg2'] for k in lr_monitor.lrs.keys()]), \ 'Names of learning rates not set correctly' - - -def test_lr_logger_deprecated(tmpdir): - """ Test the deprecation warning for LearningRateLogger. """ - tutils.reset_seed() - - with pytest.warns(DeprecationWarning, match='is now `LearningRateMonitor`'): - lr_logger = LearningRateLogger() diff --git a/tests/test_deprecated.py b/tests/test_deprecated.py index a34b3b3d72232..a3ce174713d2b 100644 --- a/tests/test_deprecated.py +++ b/tests/test_deprecated.py @@ -6,6 +6,7 @@ import torch from pytorch_lightning import Trainer +from pytorch_lightning.callbacks import LearningRateLogger from tests.base import EvalModelTemplate @@ -15,6 +16,11 @@ def _soft_unimport_module(str_module): del sys.modules[str_module] +def test_tbd_remove_in_v0_11_0_trainer(): + with pytest.deprecated_call(match='will be removed in v0.11.0'): + lr_logger = LearningRateLogger() + + def test_tbd_remove_in_v0_10_0_trainer(): rnd_val = random.random() with pytest.deprecated_call(match='will be removed in v0.10.0'): From 04dd6a352ecc708d54565733fdda06623fa4b701 Mon Sep 17 00:00:00 2001 From: Jirka Borovec Date: Thu, 3 Sep 2020 11:24:32 +0200 Subject: [PATCH 7/7] chlog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d18fac2668279..bb17e95121972 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). ### Changed +- Changed LearningRateLogger to LearningRateMonitor ([#3251](https://github.com/PyTorchLightning/pytorch-lightning/pull/3251)) ### Deprecated