Refactor logging (#15)
* Disable hydra logging auto-configuration

* Add logging pretty printing

* Force logging configuration before pytorch lightning

See Lightning-AI/pytorch-lightning#1503

* Enable exceptions pretty printing
lucmos authored Jan 8, 2022
1 parent d0f023c commit b629434
Showing 4 changed files with 81 additions and 11 deletions.
6 changes: 6 additions & 0 deletions conf/hydra/default.yaml
@@ -9,3 +9,9 @@ job:
env_set:
WANDB_START_METHOD: thread
WANDB_DIR: ${oc.env:PROJECT_ROOT}

defaults:
  # Disable Hydra's own logging configuration; otherwise the
  # logging.basicConfig call in nn_template/__init__.py has no effect
  - override job_logging: none
  - override hydra_logging: none
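
Background for this override: the standard library's logging.basicConfig is a
no-op whenever the root logger already has handlers, so if Hydra configured
logging first, the basicConfig call added in nn_template/__init__.py would be
silently ignored. A minimal sketch of that stdlib behavior:

    import logging

    logging.basicConfig(level=logging.INFO)   # first call: installs a root handler
    logging.basicConfig(level=logging.DEBUG)  # later call: silently ignored

    # With job_logging and hydra_logging overridden to none, Hydra installs
    # no handlers, so the application's basicConfig takes effect.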
1 change: 1 addition & 0 deletions setup.cfg
@@ -23,6 +23,7 @@ install_requires =
# hydra-joblib-launcher

# Stable stuff usually backward compatible
rich
dvc
python-dotenv
matplotlib
54 changes: 54 additions & 0 deletions src/nn_template/__init__.py
@@ -1,3 +1,57 @@
import logging
from datetime import datetime
from typing import Optional

from rich.console import ConsoleRenderable
from rich.logging import RichHandler
from rich.traceback import Traceback, install

install()  # pretty-print uncaught exceptions with rich tracebacks


class CustomRichHandler(RichHandler):
def render(
self,
*,
record: logging.LogRecord,
traceback: Optional[Traceback],
message_renderable: ConsoleRenderable,
) -> ConsoleRenderable:
        # Hack: display the logger name instead of the source filename in the rich logs
        path = record.name  # the stock RichHandler uses str(Path(record.pathname))
level = self.get_level_text(record)
time_format = None if self.formatter is None else self.formatter.datefmt
log_time = datetime.fromtimestamp(record.created)

log_renderable = self._log_render(
self.console,
[message_renderable] if not traceback else [message_renderable, traceback],
log_time=log_time,
time_format=time_format,
level=level,
path=path,
line_no=record.lineno,
link_path=record.pathname if self.enable_link_path else None,
)
return log_renderable


FORMAT = "%(message)s"
logging.basicConfig(
format=FORMAT,
level=logging.INFO,
datefmt="%Y-%m-%d %H:%M:%S",
handlers=[
CustomRichHandler(
rich_tracebacks=True,
show_level=True,
show_path=True,
show_time=True,
omit_repeated_times=True,
)
],
)

try:
from ._version import __version__ as __version__
except ImportError:
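
A quick way to see the handler's effect (hypothetical snippet, not part of the
commit): once the package is imported, log records are rendered with the
logger name in the right-hand column instead of the source filename.

    import logging

    import nn_template  # importing the package runs the basicConfig above

    log = logging.getLogger("nn_template.demo")
    log.info("hello")  # path column shows "nn_template.demo", not the filename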
31 changes: 20 additions & 11 deletions src/nn_template/run.py
@@ -1,3 +1,12 @@
# Required workaround: PyTorch Lightning configures logging on import, so the
# logging configuration defined in __init__.py must run before the lightning
# import; otherwise it has no effect.
# See https://github.com/PyTorchLightning/pytorch-lightning/issues/1503
#
# Importing the package here forces the execution of __init__.py even when
# this file is executed directly.
import nn_template  # isort:skip # noqa

import logging
from pathlib import Path
from typing import List

@@ -10,12 +19,14 @@

from nn_template.common.utils import PROJECT_ROOT, log_hyperparameters

pylogger = logging.getLogger(__name__)


def build_callbacks(cfg: DictConfig) -> List[Callback]:
callbacks: List[Callback] = []

for callback in cfg:
hydra.utils.log.info(f"Adding callback <{callback['_target_'].split('.')[-1]}>")
pylogger.info(f"Adding callback <{callback['_target_'].split('.')[-1]}>")
callbacks.append(hydra.utils.instantiate(callback, _recursive_=False))

return callbacks
@@ -30,9 +41,7 @@ def run(cfg: DictConfig) -> None:
seed_everything(cfg.train.random_seed)

if cfg.train.trainer.fast_dev_run:
-        hydra.utils.log.info(
-            f"Debug mode <{cfg.train.trainer.fast_dev_run=}>. Forcing debugger friendly configuration!"
-        )
+        pylogger.info(f"Debug mode <{cfg.train.trainer.fast_dev_run=}>. Forcing debugger friendly configuration!")
# Debuggers don't like GPUs nor multiprocessing
cfg.train.trainer.gpus = 0
cfg.nn.data.num_workers.train = 0
@@ -46,11 +55,11 @@ def run(cfg: DictConfig) -> None:
hydra_dir = Path(HydraConfig.get().run.dir)

# Instantiate datamodule
hydra.utils.log.info(f"Instantiating <{cfg.nn.data._target_}>")
pylogger.info(f"Instantiating <{cfg.nn.data._target_}>")
datamodule: pl.LightningDataModule = hydra.utils.instantiate(cfg.nn.data, _recursive_=False)

# Instantiate model
hydra.utils.log.info(f"Instantiating <{cfg.nn.module._target_}>")
pylogger.info(f"Instantiating <{cfg.nn.module._target_}>")
model: pl.LightningModule = hydra.utils.instantiate(
cfg.nn.module,
_recursive_=False,
@@ -63,12 +72,12 @@ def run(cfg: DictConfig) -> None:
logger = None
if "logger" in cfg.train:
logger_cfg = cfg.train.logger
hydra.utils.log.info(f"Instantiating <{logger_cfg['_target_'].split('.')[-1]}>")
pylogger.info(f"Instantiating <{logger_cfg['_target_'].split('.')[-1]}>")
logger = hydra.utils.instantiate(logger_cfg)

# TODO: incompatible with other loggers! :]
if "wandb_watch" in cfg.train:
hydra.utils.log.info(f"W&B is now watching <{cfg.train.wandb_watch.log}>!")
pylogger.info(f"W&B is now watching <{cfg.train.wandb_watch.log}>!")
logger.watch(
model,
log=cfg.train.wandb_watch.log,
@@ -79,7 +88,7 @@ def run(cfg: DictConfig) -> None:
yaml_conf: str = OmegaConf.to_yaml(cfg=cfg)
(Path(logger.experiment.dir) / "hparams.yaml").write_text(yaml_conf)

hydra.utils.log.info("Instantiating the Trainer")
pylogger.info("Instantiating the Trainer")

# The Lightning core, the Trainer
trainer = pl.Trainer(
@@ -90,10 +99,10 @@
)
log_hyperparameters(trainer=trainer, model=model, cfg=cfg)

hydra.utils.log.info("Starting training!")
pylogger.info("Starting training!")
trainer.fit(model=model, datamodule=datamodule)

hydra.utils.log.info("Starting testing!")
pylogger.info("Starting testing!")
trainer.test(datamodule=datamodule)

# Logger closing to release resources/avoid multi-run conflicts
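
The import-order constraint this file works around can be summarized in a
short sketch (the exact import-time side effect depends on the Lightning
version; see the issue linked above):

    # Wrong order (sketch): pytorch_lightning attaches handlers to the root
    # logger as an import side effect, so the later basicConfig is ignored.
    import pytorch_lightning  # noqa: F401
    import logging

    logging.basicConfig(level=logging.INFO)  # no-op: handlers already exist

run.py avoids this by importing nn_template, whose __init__.py runs the
basicConfig shown above, before any Lightning import.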
