From e7f7504cde85959ba7090f6700a851a73c2c48a0 Mon Sep 17 00:00:00 2001
From: Mihir Patel
Date: Tue, 2 Apr 2024 14:54:16 -0400
Subject: [PATCH] remove

---
 llmfoundry/models/mpt/modeling_mpt.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/llmfoundry/models/mpt/modeling_mpt.py b/llmfoundry/models/mpt/modeling_mpt.py
index 016473195a..183e1b24f6 100644
--- a/llmfoundry/models/mpt/modeling_mpt.py
+++ b/llmfoundry/models/mpt/modeling_mpt.py
@@ -367,12 +367,12 @@ def __init__(self, config: MPTConfig):
             for module in self.modules():
                 if hasattr(module, 'bias') and isinstance(
                         module.bias, nn.Parameter):
-                    log.info(f'Removing bias from {module=}.')
+                    log.debug(f'Removing bias from {module=}.')
                     module.register_parameter('bias', None)

                 # For transformer engine
-                if hasattr(module, 'use_bias'):
-                    log.info(f'Setting use_bias=False for {module=}.')
+                if hasattr(module, 'use_bias') and module.use_bias is True:
+                    log.debug(f'Setting use_bias=False for {module=}.')
                     module.use_bias = False

         log.debug(self)
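
Note: below is a minimal standalone sketch of the bias-removal loop this patch touches, for trying the behavior outside the repo. The remove_biases helper and the toy nn.Sequential model are hypothetical and not part of llmfoundry; only the loop body mirrors the patched code, including the new `use_bias is True` guard and the debug-level logging.

    import logging

    import torch
    import torch.nn as nn

    log = logging.getLogger(__name__)


    def remove_biases(model: nn.Module) -> None:
        """Strip bias parameters from every submodule (sketch of the patched loop)."""
        for module in model.modules():
            # Drop the bias tensor when the module stores one as an nn.Parameter.
            if hasattr(module, 'bias') and isinstance(module.bias, nn.Parameter):
                log.debug(f'Removing bias from {module=}.')
                module.register_parameter('bias', None)

            # Transformer Engine modules carry a separate `use_bias` flag; with
            # this patch it is only flipped (and logged) when currently True.
            if hasattr(module, 'use_bias') and module.use_bias is True:
                log.debug(f'Setting use_bias=False for {module=}.')
                module.use_bias = False


    # Toy usage: biases are gone and the forward pass still works with bias=None.
    model = nn.Sequential(nn.Linear(8, 16), nn.ReLU(), nn.Linear(16, 4))
    remove_biases(model)
    assert all(m.bias is None for m in model.modules() if hasattr(m, 'bias'))
    model(torch.randn(2, 8))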