diff --git a/llmfoundry/models/mpt/modeling_mpt.py b/llmfoundry/models/mpt/modeling_mpt.py
index 016473195a..183e1b24f6 100644
--- a/llmfoundry/models/mpt/modeling_mpt.py
+++ b/llmfoundry/models/mpt/modeling_mpt.py
@@ -367,12 +367,12 @@ def __init__(self, config: MPTConfig):
             for module in self.modules():
                 if hasattr(module, 'bias') and isinstance(
                         module.bias, nn.Parameter):
-                    log.info(f'Removing bias from {module=}.')
+                    log.debug(f'Removing bias from {module=}.')
                     module.register_parameter('bias', None)
 
                 # For transformer engine
-                if hasattr(module, 'use_bias'):
-                    log.info(f'Setting use_bias=False for {module=}.')
+                if hasattr(module, 'use_bias') and module.use_bias is True:
+                    log.debug(f'Setting use_bias=False for {module=}.')
                     module.use_bias = False
 
         log.debug(self)