diff --git a/src/peft/tuners/adalora.py b/src/peft/tuners/adalora.py
index aff877adac..e0140d6acc 100644
--- a/src/peft/tuners/adalora.py
+++ b/src/peft/tuners/adalora.py
@@ -80,7 +80,7 @@ class AdaLoraModel(LoraModel):
             peft_type="ADALORA", task_type="SEQ_2_SEQ_LM", r=8, lora_alpha=32, target_modules=["q", "v"],
             lora_dropout=0.01,
         )
-        >>> model = AutoModelForSeq2SeqLM.from_pretrained("t5-base") >>> model = AdaLoraModel(config, model)
+        >>> model = AutoModelForSeq2SeqLM.from_pretrained("t5-base") >>> model = AdaLoraModel(model, config, "default")
 
     **Attributes**:
         - **model** ([`transformers.PreTrainedModel`]) -- The model to be adapted.
diff --git a/src/peft/tuners/lora.py b/src/peft/tuners/lora.py
index a3b97cad3a..af062ddf48 100644
--- a/src/peft/tuners/lora.py
+++ b/src/peft/tuners/lora.py
@@ -123,11 +123,10 @@ class LoraModel(torch.nn.Module):
     Example:
 
         ```py
-        >>> from transformers import AutoModelForSeq2SeqLM, LoraConfig
+        >>> from transformers import AutoModelForSeq2SeqLM
        >>> from peft import LoraModel, LoraConfig
 
         >>> config = LoraConfig(
-        ...     peft_type="LORA",
         ...     task_type="SEQ_2_SEQ_LM",
         ...     r=8,
         ...     lora_alpha=32,
@@ -136,7 +135,7 @@ class LoraModel(torch.nn.Module):
         ... )
 
         >>> model = AutoModelForSeq2SeqLM.from_pretrained("t5-base")
-        >>> lora_model = LoraModel(config, model)
+        >>> lora_model = LoraModel(model, config, "default")
         ```
 
         ```py
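
For reference, the corrected `LoraModel` docstring example from the hunks above runs end-to-end as follows. This is a minimal sketch assembled from the diff itself; it assumes `transformers` and `peft` are installed and that the "t5-base" checkpoint can be downloaded.

```python
# Minimal sketch of the corrected usage shown in the docstring hunks above.
from transformers import AutoModelForSeq2SeqLM
from peft import LoraConfig, LoraModel

config = LoraConfig(
    task_type="SEQ_2_SEQ_LM",
    r=8,
    lora_alpha=32,
    target_modules=["q", "v"],
    lora_dropout=0.01,
)

model = AutoModelForSeq2SeqLM.from_pretrained("t5-base")

# The base model is passed first, then the config, then the adapter name,
# matching the new call order LoraModel(model, config, "default").
lora_model = LoraModel(model, config, "default")
```

The same argument order applies to `AdaLoraModel(model, config, "default")` in the adalora.py hunk.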