diff --git a/ludwig/models/llm.py b/ludwig/models/llm.py
index c632cccc1db..d74a16e56c0 100644
--- a/ludwig/models/llm.py
+++ b/ludwig/models/llm.py
@@ -226,24 +226,6 @@ def initialize_adapter(self):
         from peft import MODEL_TYPE_TO_PEFT_MODEL_MAPPING, PeftConfig

         peft_config = PeftConfig.from_pretrained(self.config_obj.adapter.pretrained_adapter_weights)
-        peft_dict = peft_config.to_dict()
-
-        # Need to update the peft config with some of the values from config_obj because not all of them are set
-        for param_name, param_value in self.config_obj.adapter.to_config().to_dict().items():
-            # Not all parameters are supported by all models, so we only add the parameter to the load kwargs
-            # if it is supported by the model.
-            if param_value is None:
-                # param_name and param_value come from the config object and contain default
-                # values for the adapter. Examples of parameters with missing values might be:
-                # 'auto_mapping', 'base_model_name_or_path', and 'task_type'.
-                # Note that some of these values might already be set in peft_config, which comes from HF
-                # directly (specifically, adapter_config.json in the model repo), and we don't want to override
-                # those values with None.
-                continue
-            if param_name not in peft_dict:
-                # If any parameters are not set in adapter_config.json in HF, we want to populate them with the
-                # appropriate default values.
-                setattr(peft_config, param_name, param_value)

         self.model = MODEL_TYPE_TO_PEFT_MODEL_MAPPING[peft_config.task_type].from_pretrained(
            self.model, self.config_obj.adapter.pretrained_adapter_weights
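
For context, here is a minimal standalone sketch of the load path this diff leaves behind: the adapter config is taken as-is from adapter_config.json in the HF repo, with no back-filling from a local config object. The base model and adapter repo names below are illustrative placeholders, not values from this PR.

    # Sketch only: assumes a causal-LM base model and a LoRA adapter published on the HF Hub.
    from transformers import AutoModelForCausalLM
    from peft import MODEL_TYPE_TO_PEFT_MODEL_MAPPING, PeftConfig

    base_model = AutoModelForCausalLM.from_pretrained("facebook/opt-350m")  # placeholder base model

    adapter_repo = "ybelkada/opt-350m-lora"  # placeholder adapter repo
    peft_config = PeftConfig.from_pretrained(adapter_repo)

    # adapter_config.json in the repo already carries task_type (e.g. "CAUSAL_LM"),
    # which selects the matching PeftModel subclass to wrap the base model.
    peft_model = MODEL_TYPE_TO_PEFT_MODEL_MAPPING[peft_config.task_type].from_pretrained(
        base_model, adapter_repo
    )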