Improve DummyLogger (#6398)
* fix dummy logger

* docs

* update docs

* add changelog

* add none return annotation

* return empty string for name, version
awaelchli authored and carmocca committed Mar 29, 2021
1 parent cc40fa3 commit 1ff33ad
Showing 4 changed files with 31 additions and 16 deletions.
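
For context, a minimal sketch (not part of the diff) of how the reworked DummyLogger is expected to behave after this commit. DummyLogger is the stand-in Lightning uses internally to disable the user's logger, e.g. when running with fast_dev_run=True; before this fix its log_hyperparams took exactly one argument, so internal calls forwarding extra arguments raised a TypeError.

from pytorch_lightning.loggers.base import DummyLogger

logger = DummyLogger()

# log_hyperparams / log_metrics are now no-ops that accept arbitrary arguments,
# so forwarding logger-specific extras no longer raises a TypeError.
logger.log_hyperparams({"learning_rate": 0.01}, "extra_positional", extra_keyword=True)
logger.log_metrics({"loss": 0.5}, step=3)

# name and version now return empty strings instead of None, and indexing
# imitates a LoggerCollection.
assert logger.name == "" and logger.version == ""
assert logger[0] is logger
assert logger.experiment[0] is logger.experiment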
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -60,6 +60,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Fixed `Trainer` not resetting `lightning_optimizers` when calling `Trainer.fit()` multiple times ([#6372](https://github.com/PyTorchLightning/pytorch-lightning/pull/6372))


+- Fixed `DummyLogger.log_hyperparams` raising a `TypeError` when running with `fast_dev_run=True` ([#6398](https://github.com/PyTorchLightning/pytorch-lightning/pull/6398))
+
+
 ## [1.2.2] - 2021-03-02

 ### Added
34 changes: 18 additions & 16 deletions pytorch_lightning/loggers/base.py
@@ -279,12 +279,14 @@ def _sanitize_params(params: Dict[str, Any]) -> Dict[str, Any]:
         return params

     @abstractmethod
-    def log_hyperparams(self, params: argparse.Namespace):
+    def log_hyperparams(self, params: argparse.Namespace, *args, **kwargs):
         """
         Record hyperparameters.

         Args:
             params: :class:`~argparse.Namespace` containing the hyperparameters
+            args: Optional positional arguments, depends on the specific logger being used
+            kwargs: Optional keyword arguments, depends on the specific logger being used
         """

     def log_graph(self, model: LightningModule, input_array=None) -> None:
@@ -418,41 +420,41 @@ def nop(*args, **kw):
     def __getattr__(self, _):
         return self.nop

-    def __getitem__(self, idx):
-        # enables self.logger[0].experiment.add_image
-        # and self.logger.experiment[0].add_image(...)
+    def __getitem__(self, idx) -> "DummyExperiment":
+        # enables self.logger.experiment[0].add_image(...)
         return self


 class DummyLogger(LightningLoggerBase):
-    """ Dummy logger for internal use. Is usefull if we want to disable users
-        logger for a feature, but still secure that users code can run """
+    """
+    Dummy logger for internal use. It is useful if we want to disable user's
+    logger for a feature, but still ensure that user code can run
+    """

     def __init__(self):
         super().__init__()
         self._experiment = DummyExperiment()

     @property
-    def experiment(self):
+    def experiment(self) -> DummyExperiment:
         return self._experiment

     @rank_zero_only
-    def log_metrics(self, metrics, step):
+    def log_metrics(self, *args, **kwargs) -> None:
         pass

     @rank_zero_only
-    def log_hyperparams(self, params):
+    def log_hyperparams(self, *args, **kwargs) -> None:
         pass

     @property
-    def name(self):
-        pass
+    def name(self) -> str:
+        return ""

     @property
-    def version(self):
-        pass
+    def version(self) -> str:
+        return ""

-    def __getitem__(self, idx):
+    def __getitem__(self, idx) -> "DummyLogger":
         # enables self.logger[0].experiment.add_image(...)
         return self


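The docstring addition in the first hunk also documents that log_hyperparams implementations may receive extra positional and keyword arguments. Below is a hedged sketch (a hypothetical logger, not from this commit) of a LightningLoggerBase subclass written against that updated signature.

from argparse import Namespace
from typing import Any, Dict, Optional, Union

from pytorch_lightning.loggers.base import LightningLoggerBase
from pytorch_lightning.utilities import rank_zero_only


class PrintLogger(LightningLoggerBase):
    """Toy logger that simply prints whatever it is asked to record."""

    @property
    def experiment(self) -> None:
        # No backing experiment object for this toy logger.
        return None

    @rank_zero_only
    def log_hyperparams(self, params: Union[Dict[str, Any], Namespace], *args, **kwargs) -> None:
        # Extra args/kwargs are logger-specific; this toy logger just prints them.
        print("hparams:", params, args, kwargs)

    @rank_zero_only
    def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) -> None:
        print(f"step {step}: {metrics}")

    @property
    def name(self) -> str:
        return "print_logger"

    @property
    def version(self) -> str:
        return "0"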
9 changes: 9 additions & 0 deletions tests/loggers/test_base.py
@@ -229,15 +229,24 @@ def log_metrics(self, metrics, step):


 def test_dummyexperiment_support_indexing():
+    """ Test that the DummyExperiment can imitate indexing the experiment in a LoggerCollection. """
     experiment = DummyExperiment()
     assert experiment[0] == experiment


 def test_dummylogger_support_indexing():
+    """ Test that the DummyLogger can imitate indexing of a LoggerCollection. """
     logger = DummyLogger()
     assert logger[0] == logger


+def test_dummylogger_noop_method_calls():
+    """ Test that the DummyLogger methods can be called with arbitrary arguments. """
+    logger = DummyLogger()
+    logger.log_hyperparams("1", 2, three="three")
+    logger.log_metrics("1", 2, three="three")
+
+
 def test_np_sanitization():

     class CustomParamsLogger(CustomLogger):
1 change: 1 addition & 0 deletions tests/trainer/flags/test_fast_dev_run.py
@@ -71,6 +71,7 @@ def test_step(self, batch, batch_idx):
     checkpoint_callback = ModelCheckpoint()
     early_stopping_callback = EarlyStopping()
     trainer_config = dict(
+        default_root_dir=tmpdir,
         fast_dev_run=fast_dev_run,
         val_check_interval=2,
         logger=True,
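For reference, a minimal usage sketch (assuming the Trainer API of this release; not part of the diff) of the code path the test above covers: with fast_dev_run=True the Trainer runs a single batch and internally swaps the configured logger for the DummyLogger fixed above, so hyperparameter logging no longer raises a TypeError.

import pytorch_lightning as pl

def smoke_test(model: pl.LightningModule) -> None:
    # logger=True selects the default logger; fast_dev_run=True makes the Trainer
    # replace it internally with DummyLogger for a one-batch dry run.
    trainer = pl.Trainer(fast_dev_run=True, logger=True, max_epochs=1)
    trainer.fit(model)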
