From d15eb30b01067e8b746aae4286a276d578114026 Mon Sep 17 00:00:00 2001
From: Eugene Khvedchenya
Date: Wed, 28 Jun 2023 11:23:39 +0300
Subject: [PATCH] Add missing unwrap_model after merge with master

---
 src/super_gradients/training/sg_trainer/sg_trainer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/super_gradients/training/sg_trainer/sg_trainer.py b/src/super_gradients/training/sg_trainer/sg_trainer.py
index 348736de41..b9b9b01b47 100755
--- a/src/super_gradients/training/sg_trainer/sg_trainer.py
+++ b/src/super_gradients/training/sg_trainer/sg_trainer.py
@@ -1242,7 +1242,7 @@ def forward(self, inputs, targets):
 
         processing_params = self._get_preprocessing_from_valid_loader()
         if processing_params is not None:
-            self.net.module.set_dataset_processing_params(**processing_params)
+            unwrap_model(self.net).set_dataset_processing_params(**processing_params)
 
         try:
             # HEADERS OF THE TRAINING PROGRESS
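
Note: the patch replaces a direct `self.net.module` access with `unwrap_model(self.net)`. The sketch below (not the actual SuperGradients implementation; the helper name `unwrap_model_sketch` is hypothetical) shows what such a helper typically does and why it is safer: `.module` only exists when the network is wrapped in DataParallel/DistributedDataParallel, so a conditional unwrap also works for plain, unwrapped models.

import torch.nn as nn
from torch.nn.parallel import DataParallel, DistributedDataParallel


def unwrap_model_sketch(model: nn.Module) -> nn.Module:
    # DataParallel and DistributedDataParallel expose the wrapped network
    # through their `.module` attribute; plain modules do not have it.
    if isinstance(model, (DataParallel, DistributedDataParallel)):
        return model.module
    return model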