@@ -1421,7 +1421,7 @@ def __init__(
         # to make sure classifier layer is trainable; this may add a new ModulesToSaveWrapper
         _set_trainable(self, adapter_name)
 
-    def add_adapter(self, adapter_name: str, peft_config: PeftConfig) -> None:
+    def add_adapter(self, adapter_name: str, peft_config: PeftConfig, low_cpu_mem_usage: bool = False) -> None:
         """
         Add an adapter to the model based on the passed configuration.
@@ -1437,6 +1437,10 @@ def add_adapter(self, adapter_name: str, peft_config: PeftConfig) -> None:
                 The name of the adapter to be added.
             peft_config ([`PeftConfig`]):
                 The configuration of the adapter to be added.
+            low_cpu_mem_usage (`bool`, `optional`, defaults to `False`):
+                Create empty adapter weights on meta device. Useful to speed up the process when loading saved
+                adapters. Don't use this option when creating a new PEFT adapter for training.
+
         """
         # ensure that additional adapters also add the classifier layer to modules_to_save
         if hasattr(peft_config, "modules_to_save"):
@@ -1446,7 +1450,7 @@ def add_adapter(self, adapter_name: str, peft_config: PeftConfig) -> None:
             else:
                 peft_config.modules_to_save.extend(classifier_module_names)
 
-        return super().add_adapter(adapter_name, peft_config)
+        return super().add_adapter(adapter_name, peft_config, low_cpu_mem_usage=low_cpu_mem_usage)
 
     def forward(
         self,
@@ -2140,7 +2144,7 @@ def __init__(
         # to make sure classifier layer is trainable; this may add a new ModulesToSaveWrapper
         _set_trainable(self, adapter_name)
 
-    def add_adapter(self, adapter_name: str, peft_config: PeftConfig) -> None:
+    def add_adapter(self, adapter_name: str, peft_config: PeftConfig, low_cpu_mem_usage: bool = False) -> None:
         """
         Add an adapter to the model based on the passed configuration.
@@ -2156,6 +2160,10 @@ def add_adapter(self, adapter_name: str, peft_config: PeftConfig) -> None:
                 The name of the adapter to be added.
             peft_config ([`PeftConfig`]):
                 The configuration of the adapter to be added.
+            low_cpu_mem_usage (`bool`, `optional`, defaults to `False`):
+                Create empty adapter weights on meta device. Useful to speed up the process when loading saved
+                adapters. Don't use this option when creating a new PEFT adapter for training.
+
         """
         # ensure that additional adapters also add the classifier layer to modules_to_save
         if hasattr(peft_config, "modules_to_save"):
@@ -2165,7 +2173,7 @@ def add_adapter(self, adapter_name: str, peft_config: PeftConfig) -> None:
             else:
                 peft_config.modules_to_save.extend(classifier_module_names)
 
-        return super().add_adapter(adapter_name, peft_config)
+        return super().add_adapter(adapter_name, peft_config, low_cpu_mem_usage=low_cpu_mem_usage)
 
     def forward(
         self,
@@ -2357,7 +2365,7 @@ def __init__(
         # to make sure classifier layer is trainable; this may add a new ModulesToSaveWrapper
         _set_trainable(self, adapter_name)
 
-    def add_adapter(self, adapter_name: str, peft_config: PeftConfig) -> None:
+    def add_adapter(self, adapter_name: str, peft_config: PeftConfig, low_cpu_mem_usage: bool = False) -> None:
         """
         Add an adapter to the model based on the passed configuration.
@@ -2373,6 +2381,10 @@ def add_adapter(self, adapter_name: str, peft_config: PeftConfig) -> None:
                 The name of the adapter to be added.
             peft_config ([`PeftConfig`]):
                 The configuration of the adapter to be added.
+            low_cpu_mem_usage (`bool`, `optional`, defaults to `False`):
+                Create empty adapter weights on meta device. Useful to speed up the process when loading saved
+                adapters. Don't use this option when creating a new PEFT adapter for training.
+
         """
         # ensure that additional adapters also add the classifier layer to modules_to_save
         if hasattr(peft_config, "modules_to_save"):
@@ -2382,7 +2394,7 @@ def add_adapter(self, adapter_name: str, peft_config: PeftConfig) -> None:
             else:
                 peft_config.modules_to_save.extend(qa_module_names)
 
-        return super().add_adapter(adapter_name, peft_config)
+        return super().add_adapter(adapter_name, peft_config, low_cpu_mem_usage=low_cpu_mem_usage)
 
     def forward(
        self,
0 commit comments