From 2b0af42bce05d097a46eb63b50253cfb7928de91 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ziy=C3=BA=20Ye?=
Date: Wed, 18 Sep 2024 07:23:05 -0700
Subject: [PATCH] fix the wandb logging issue (#33464)

* fix the wandb logging issue

* handle ConfigError in WandbCallback; move import to local scope

* update integration_utils.py; move import of ConfigError

* Update integration_utils.py: remove trailing whitespace
---
 src/transformers/integrations/integration_utils.py | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/src/transformers/integrations/integration_utils.py b/src/transformers/integrations/integration_utils.py
index 9172f9599f77b0..40298f9c6fc77b 100755
--- a/src/transformers/integrations/integration_utils.py
+++ b/src/transformers/integrations/integration_utils.py
@@ -803,6 +803,10 @@ def setup(self, args, state, model, **kwargs):
         if self._wandb is None:
             return
         self._initialized = True
+
+        # prepare to handle potential configuration issues during setup
+        from wandb.sdk.lib.config_util import ConfigError as WandbConfigError
+
         if state.is_world_process_zero:
             logger.info(
                 'Automatic Weights & Biases logging enabled, to disable set os.environ["WANDB_DISABLED"] = "true"'
@@ -852,7 +856,13 @@ def setup(self, args, state, model, **kwargs):
             try:
                 self._wandb.config["model/num_parameters"] = model.num_parameters()
             except AttributeError:
-                logger.info("Could not log the number of model parameters in Weights & Biases.")
+                logger.info(
+                    "Could not log the number of model parameters in Weights & Biases due to an AttributeError."
+                )
+            except WandbConfigError:
+                logger.warning(
+                    "A ConfigError was raised whilst setting the number of model parameters in Weights & Biases config."
+                )
 
             # log the initial model architecture to an artifact
             if self._log_model.is_enabled:
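
For context, below is a minimal standalone sketch of the guarded pattern the patch introduces: the wandb config update for "model/num_parameters" is wrapped so that both a missing num_parameters() attribute and a wandb ConfigError are logged instead of interrupting training. The log_num_parameters helper is illustrative and not part of the patch; it assumes wandb is installed and that ConfigError is importable from wandb.sdk.lib.config_util, as the patch itself does.

import logging

import wandb
from wandb.sdk.lib.config_util import ConfigError as WandbConfigError

logger = logging.getLogger(__name__)


def log_num_parameters(run: "wandb.sdk.wandb_run.Run", model) -> None:
    """Record the model's parameter count in the wandb run config, tolerating failures."""
    try:
        # Mirrors the patched WandbCallback.setup: model.num_parameters() may raise
        # AttributeError, and the config assignment may raise ConfigError inside wandb.
        run.config["model/num_parameters"] = model.num_parameters()
    except AttributeError:
        logger.info("Could not log the number of model parameters in Weights & Biases.")
    except WandbConfigError:
        logger.warning("ConfigError while setting model/num_parameters in the Weights & Biases config.")

In the Trainer integration the same logic runs inside WandbCallback.setup, where self._wandb.config plays the role of run.config here.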