From e098abe65b836191a50d0c314e3c620480b8e7a0 Mon Sep 17 00:00:00 2001
From: rohitgr7
Date: Mon, 16 Nov 2020 01:10:38 +0530
Subject: [PATCH] logging with fdr

---
 pytorch_lightning/trainer/connectors/debugging_connector.py  | 4 +---
 .../trainer/connectors/logger_connector/logger_connector.py  | 2 +-
 2 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/pytorch_lightning/trainer/connectors/debugging_connector.py b/pytorch_lightning/trainer/connectors/debugging_connector.py
index 1ea4f733a2a2f..b8703d28e9ba7 100644
--- a/pytorch_lightning/trainer/connectors/debugging_connector.py
+++ b/pytorch_lightning/trainer/connectors/debugging_connector.py
@@ -44,9 +44,7 @@ def on_init_start(
             )
 
         self.trainer.fast_dev_run = fast_dev_run
-
-        if fast_dev_run is True:
-            fast_dev_run = 1
+        fast_dev_run = int(fast_dev_run)
 
         if fast_dev_run:
             limit_train_batches = fast_dev_run
diff --git a/pytorch_lightning/trainer/connectors/logger_connector/logger_connector.py b/pytorch_lightning/trainer/connectors/logger_connector/logger_connector.py
index 9eacdb6dc389b..8c346a421c65e 100644
--- a/pytorch_lightning/trainer/connectors/logger_connector/logger_connector.py
+++ b/pytorch_lightning/trainer/connectors/logger_connector/logger_connector.py
@@ -613,7 +613,7 @@ def __gather_result_across_time_and_optimizers(self, epoch_output):
 
     def log_train_step_metrics(self, batch_output):
         # when metrics should be logged
-        if self.should_update_logs or self.trainer.fast_dev_run:
+        if self.should_update_logs or (int(self.trainer.fast_dev_run) == 1):
             # logs user requested information to logger
             metrics = self.cached_results.get_latest_batch_log_metrics()
             grad_norm_dic = batch_output.grad_norm_dic
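
Note (not part of the patch): a minimal standalone sketch of why the simplification holds, assuming fast_dev_run is either a bool or a non-negative int as accepted by the Trainer. The should_log helper below is a hypothetical stand-in that mirrors the updated condition in log_train_step_metrics; it is not an existing PyTorch Lightning function.

# Coercion the patch relies on: bool is a subclass of int in Python,
# so int(True) == 1 and the old special case `if fast_dev_run is True`
# collapses into a single int() call.
assert int(True) == 1    # fast_dev_run=True behaves like fast_dev_run=1
assert int(False) == 0   # falsy, so the `if fast_dev_run:` block is skipped
assert int(7) == 7       # an explicit int passes through unchanged

def should_log(should_update_logs: bool, fast_dev_run) -> bool:
    # Mirrors the new logging condition: log every training step only
    # when fast_dev_run was set to True / 1, otherwise defer to the
    # regular row_log_interval-based schedule.
    return should_update_logs or (int(fast_dev_run) == 1)

assert should_log(False, True) is True   # fast dev run: log each step
assert should_log(False, 7) is False     # fast_dev_run > 1: normal schedule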