From b4b31773d7d8d7d9da065f1ff10b47a8af8214ff Mon Sep 17 00:00:00 2001 From: "Wang, Yi" Date: Fri, 16 Jun 2023 13:57:38 -0400 Subject: [PATCH] do not call model.eval() when from_pretrained is called for LoRA retraining with "is_trainable" set to True --- src/peft/peft_model.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/peft/peft_model.py b/src/peft/peft_model.py index cb9d3bf159..a386d38cfe 100644 --- a/src/peft/peft_model.py +++ b/src/peft/peft_model.py @@ -508,7 +508,8 @@ def load_adapter(self, model_id, adapter_name, is_trainable=False, **kwargs): add_hook_to_module(self.get_base_model(), hook) # Set model in evaluation mode to deactivate Dropout modules by default - self.eval() + if not is_trainable: + self.eval() return load_result def set_adapter(self, adapter_name):