From 1a7773e84c25dca5eb3565326bc9cc3d132a5ab1 Mon Sep 17 00:00:00 2001 From: Carlos Mocholi Date: Fri, 15 Oct 2021 22:15:33 +0200 Subject: [PATCH] Add back comment --- pytorch_lightning/trainer/trainer.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py index 3ad8fba07eae6..e6d8ccde91d71 100644 --- a/pytorch_lightning/trainer/trainer.py +++ b/pytorch_lightning/trainer/trainer.py @@ -1412,6 +1412,7 @@ def call_hook( if hook_name in ("on_train_start",) and hasattr(self.accelerator, hook_name): accelerator_hook = getattr(self.accelerator, hook_name) accelerator_output = accelerator_hook(*args, **kwargs) + # Rely on the accelerator output if LightningModule hook returns nothing # Required for cases such as DataParallel where we reduce the output for the user # todo: move this data parallel logic into the data parallel plugin output = accelerator_output if output is None else output