diff --git a/src/transformers/integrations/integration_utils.py b/src/transformers/integrations/integration_utils.py
index 9172f9599f77b0..40298f9c6fc77b 100755
--- a/src/transformers/integrations/integration_utils.py
+++ b/src/transformers/integrations/integration_utils.py
@@ -803,6 +803,10 @@ def setup(self, args, state, model, **kwargs):
         if self._wandb is None:
             return
         self._initialized = True
+
+        # prepare to handle potential configuration issues during setup
+        from wandb.sdk.lib.config_util import ConfigError as WandbConfigError
+
         if state.is_world_process_zero:
             logger.info(
                 'Automatic Weights & Biases logging enabled, to disable set os.environ["WANDB_DISABLED"] = "true"'
@@ -852,7 +856,13 @@ def setup(self, args, state, model, **kwargs):
             try:
                 self._wandb.config["model/num_parameters"] = model.num_parameters()
             except AttributeError:
-                logger.info("Could not log the number of model parameters in Weights & Biases.")
+                logger.info(
+                    "Could not log the number of model parameters in Weights & Biases due to an AttributeError."
+                )
+            except WandbConfigError:
+                logger.warning(
+                    "A ConfigError was raised whilst setting the number of model parameters in Weights & Biases config."
+                )
 
             # log the initial model architecture to an artifact
             if self._log_model.is_enabled:
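
For reference, a minimal standalone sketch of the guarded config update this diff introduces. The log_num_parameters helper, the run argument, and the module-level logger are hypothetical stand-ins for the callback's internals, not part of the actual change:

# Sketch of the pattern added in setup(): wrap the wandb config update so that
# both a missing num_parameters() method and a rejected config update are
# logged instead of interrupting training. Assumes wandb is installed.
import logging

from wandb.sdk.lib.config_util import ConfigError as WandbConfigError

logger = logging.getLogger(__name__)


def log_num_parameters(run, model):
    # run: an active wandb run (e.g. returned by wandb.init());
    # model: any object that may expose num_parameters(), as transformers models do.
    try:
        run.config["model/num_parameters"] = model.num_parameters()
    except AttributeError:
        # the model does not implement num_parameters()
        logger.info("Could not log the number of model parameters (AttributeError).")
    except WandbConfigError:
        # wandb rejected the config update (e.g. the key is locked)
        logger.warning("ConfigError while setting model/num_parameters in the wandb config.")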