Commit ec4e5a7

BenjaminBossan authored and sirluk committed
FIX Missing low_cpu_mem_usage argument (huggingface#2156)
The newly introduced low_cpu_mem_usage argument was not propagated to the add_adapter method of all PeftModel task types. This is now fixed and tests were added.
1 parent f3c2cf7 commit ec4e5a7

4 files changed, +28 -7 lines changed
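
For orientation before the diff: `low_cpu_mem_usage` is passed by callers such as `PeftModel.load_adapter` and `PeftModel.from_pretrained` down to `add_adapter`, but the task-specific subclasses overrode `add_adapter` without that parameter, so the option could not reach them. A minimal usage sketch, assuming a placeholder base model and save path (neither appears in this commit):

from transformers import AutoModelForSequenceClassification
from peft import LoraConfig, get_peft_model

# Placeholder model ID and path, for illustration only.
base = AutoModelForSequenceClassification.from_pretrained("bert-base-uncased")
model = get_peft_model(base, LoraConfig(task_type="SEQ_CLS"))
model.save_pretrained("/tmp/my_adapter")

# load_adapter forwards low_cpu_mem_usage to add_adapter: the second adapter's
# weights are created empty on the meta device, then filled from the checkpoint.
model.load_adapter("/tmp/my_adapter", adapter_name="second", low_cpu_mem_usage=True)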

src/peft/peft_model.py (+18, -6)
@@ -1421,7 +1421,7 @@ def __init__(
         # to make sure classifier layer is trainable; this may add a new ModulesToSaveWrapper
         _set_trainable(self, adapter_name)

-    def add_adapter(self, adapter_name: str, peft_config: PeftConfig) -> None:
+    def add_adapter(self, adapter_name: str, peft_config: PeftConfig, low_cpu_mem_usage: bool = False) -> None:
         """
         Add an adapter to the model based on the passed configuration.

@@ -1437,6 +1437,10 @@ def add_adapter(self, adapter_name: str, peft_config: PeftConfig) -> None:
                 The name of the adapter to be added.
             peft_config ([`PeftConfig`]):
                 The configuration of the adapter to be added.
+            low_cpu_mem_usage (`bool`, `optional`, defaults to `False`):
+                Create empty adapter weights on meta device. Useful to speed up the process when loading saved
+                adapters. Don't use this option when creating a new PEFT adapter for training.
+
         """
         # ensure that additional adapters also add the classifier layer to modules_to_save
         if hasattr(peft_config, "modules_to_save"):
@@ -1446,7 +1450,7 @@ def add_adapter(self, adapter_name: str, peft_config: PeftConfig) -> None:
             else:
                 peft_config.modules_to_save.extend(classifier_module_names)

-        return super().add_adapter(adapter_name, peft_config)
+        return super().add_adapter(adapter_name, peft_config, low_cpu_mem_usage=low_cpu_mem_usage)

     def forward(
         self,
@@ -2140,7 +2144,7 @@ def __init__(
         # to make sure classifier layer is trainable; this may add a new ModulesToSaveWrapper
         _set_trainable(self, adapter_name)

-    def add_adapter(self, adapter_name: str, peft_config: PeftConfig) -> None:
+    def add_adapter(self, adapter_name: str, peft_config: PeftConfig, low_cpu_mem_usage: bool = False) -> None:
         """
         Add an adapter to the model based on the passed configuration.

@@ -2156,6 +2160,10 @@ def add_adapter(self, adapter_name: str, peft_config: PeftConfig) -> None:
                 The name of the adapter to be added.
             peft_config ([`PeftConfig`]):
                 The configuration of the adapter to be added.
+            low_cpu_mem_usage (`bool`, `optional`, defaults to `False`):
+                Create empty adapter weights on meta device. Useful to speed up the process when loading saved
+                adapters. Don't use this option when creating a new PEFT adapter for training.
+
         """
         # ensure that additional adapters also add the classifier layer to modules_to_save
         if hasattr(peft_config, "modules_to_save"):
@@ -2165,7 +2173,7 @@ def add_adapter(self, adapter_name: str, peft_config: PeftConfig) -> None:
             else:
                 peft_config.modules_to_save.extend(classifier_module_names)

-        return super().add_adapter(adapter_name, peft_config)
+        return super().add_adapter(adapter_name, peft_config, low_cpu_mem_usage=low_cpu_mem_usage)

     def forward(
         self,
@@ -2357,7 +2365,7 @@ def __init__(
         # to make sure classifier layer is trainable; this may add a new ModulesToSaveWrapper
         _set_trainable(self, adapter_name)

-    def add_adapter(self, adapter_name: str, peft_config: PeftConfig) -> None:
+    def add_adapter(self, adapter_name: str, peft_config: PeftConfig, low_cpu_mem_usage: bool = False) -> None:
         """
         Add an adapter to the model based on the passed configuration.

@@ -2373,6 +2381,10 @@ def add_adapter(self, adapter_name: str, peft_config: PeftConfig) -> None:
                 The name of the adapter to be added.
             peft_config ([`PeftConfig`]):
                 The configuration of the adapter to be added.
+            low_cpu_mem_usage (`bool`, `optional`, defaults to `False`):
+                Create empty adapter weights on meta device. Useful to speed up the process when loading saved
+                adapters. Don't use this option when creating a new PEFT adapter for training.
+
         """
         # ensure that additional adapters also add the classifier layer to modules_to_save
         if hasattr(peft_config, "modules_to_save"):
@@ -2382,7 +2394,7 @@ def add_adapter(self, adapter_name: str, peft_config: PeftConfig) -> None:
             else:
                 peft_config.modules_to_save.extend(qa_module_names)

-        return super().add_adapter(adapter_name, peft_config)
+        return super().add_adapter(adapter_name, peft_config, low_cpu_mem_usage=low_cpu_mem_usage)

     def forward(
         self,
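
The same change repeats three times because each task-specific subclass (judging by the classifier/qa module names: the sequence classification, token classification, and question answering models) overrides add_adapter only to extend modules_to_save before delegating to PeftModel.add_adapter. Continuing the sketch above, the fixed signature can also be called directly (again an illustration, not code from this commit):

# Direct call on a task-specific PeftModel. With low_cpu_mem_usage=True the
# adapter starts out empty on the meta device, so only do this when saved
# weights will be loaded into it afterwards.
model.add_adapter("empty_adapter", LoraConfig(task_type="SEQ_CLS"), low_cpu_mem_usage=True)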

tests/test_decoder_models.py (+3)
@@ -200,6 +200,9 @@ def test_save_pretrained_selected_adapters(self, test_name, model_id, config_cls
     def test_save_pretrained_selected_adapters_pickle(self, test_name, model_id, config_cls, config_kwargs):
         self._test_save_pretrained_selected_adapters(model_id, config_cls, config_kwargs, safe_serialization=False)

+    def test_load_model_low_cpu_mem_usage(self):
+        self._test_load_model_low_cpu_mem_usage(PEFT_DECODER_MODELS_TO_TEST[0], LoraConfig, {})
+
     @parameterized.expand(
         PeftTestConfigManager.get_grid_parameters(FULL_GRID, filter_params_func=skip_oft_or_hra_and_gpt2)
     )
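
The shared helper `_test_load_model_low_cpu_mem_usage` lives in tests/testing_common.py and is not shown in this diff. As a hypothetical stand-in for what such a check needs to cover (the body below is an assumption, not the actual helper): save an adapter, reload it with low_cpu_mem_usage=True, and verify that no weight is left on the meta device.

import tempfile

from transformers import AutoModelForCausalLM

from peft import PeftModel, get_peft_model

def check_load_low_cpu_mem_usage(model_id, config_cls, config_kwargs):
    # Hypothetical sketch; the real helper in testing_common.py may differ.
    base = AutoModelForCausalLM.from_pretrained(model_id)
    model = get_peft_model(base, config_cls(**config_kwargs))
    with tempfile.TemporaryDirectory() as tmp:
        model.save_pretrained(tmp)
        fresh = AutoModelForCausalLM.from_pretrained(model_id)
        loaded = PeftModel.from_pretrained(fresh, tmp, low_cpu_mem_usage=True)
        # After loading, no adapter weight may remain on the meta device.
        assert not any(p.device.type == "meta" for p in loaded.parameters())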

tests/test_encoder_decoder_models.py (+3)
@@ -82,6 +82,9 @@ def test_save_pretrained_selected_adapters(self, test_name, model_id, config_cls
     def test_save_pretrained_selected_adapters_pickle(self, test_name, model_id, config_cls, config_kwargs):
         self._test_save_pretrained_selected_adapters(model_id, config_cls, config_kwargs, safe_serialization=False)

+    def test_load_model_low_cpu_mem_usage(self):
+        self._test_load_model_low_cpu_mem_usage(PEFT_ENCODER_DECODER_MODELS_TO_TEST[0], LoraConfig, {})
+
     @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
     def test_from_pretrained_config_construction(self, test_name, model_id, config_cls, config_kwargs):
         self._test_from_pretrained_config_construction(model_id, config_cls, config_kwargs)

tests/test_feature_extraction_models.py (+4, -1)
@@ -17,7 +17,7 @@
 from parameterized import parameterized
 from transformers import AutoModel

-from peft import PrefixTuningConfig, PromptLearningConfig
+from peft import LoraConfig, PrefixTuningConfig, PromptLearningConfig

 from .testing_common import PeftCommonTester, PeftTestConfigManager

@@ -99,6 +99,9 @@ def test_save_pretrained(self, test_name, model_id, config_cls, config_kwargs):
     def test_save_pretrained_selected_adapters(self, test_name, model_id, config_cls, config_kwargs):
         self._test_save_pretrained_selected_adapters(model_id, config_cls, config_kwargs)

+    def test_load_model_low_cpu_mem_usage(self):
+        self._test_load_model_low_cpu_mem_usage(PEFT_FEATURE_EXTRACTION_MODELS_TO_TEST[0], LoraConfig, {})
+
     @parameterized.expand(PeftTestConfigManager.get_grid_parameters(FULL_GRID))
     def test_from_pretrained_config_construction(self, test_name, model_id, config_cls, config_kwargs):
         self._test_from_pretrained_config_construction(model_id, config_cls, config_kwargs)
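
Note on the last file: unlike the decoder and encoder-decoder test modules, tests/test_feature_extraction_models.py did not yet import LoraConfig, hence the extra one-line import change (the -1/+1 in its stats). Apart from the model list each test draws from (the first entry of the respective *_MODELS_TO_TEST constant), the three new tests are identical.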
