Skip to content

Commit 373bdc3

Browse files
Release 0.16.0
- Bump versions - Update a comment to point to new PR - Remove a test skip that is obsolete after huggingface#2579
1 parent 180777e commit 373bdc3

File tree

4 files changed

+3
-6
lines changed

4 files changed

+3
-6
lines changed

setup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
from setuptools import find_packages, setup
1616

1717

18-
VERSION = "0.15.2.dev0"
18+
VERSION = "0.16.0"
1919

2020
extras = {}
2121
extras["quality"] = [

src/peft/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
__version__ = "0.15.2.dev0"
15+
__version__ = "0.16.0"
1616

1717
from .auto import (
1818
MODEL_TYPE_TO_PEFT_MODEL_MAPPING,

tests/test_gpu_examples.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3807,7 +3807,7 @@ def setUp(self):
38073807
# torchao breaks with fp16 and if a previous test uses fp16, transformers will set this env var, which affects
38083808
# subsequent tests, therefore the env var needs to be cleared explicitly
38093809
#
3810-
# TODO: remove this once https://github.com/huggingface/transformers/pull/34886 is merged
3810+
# TODO: remove this once https://github.com/huggingface/transformers/pull/37259 is merged
38113811
os.environ.pop("ACCELERATE_MIXED_PRECISION", None)
38123812

38133813
def tearDown(self):

tests/testing_common.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1393,9 +1393,6 @@ def _test_peft_model_device_map(self, model_id, config_cls, config_kwargs):
13931393
def _test_training_prompt_learning_tasks(self, model_id, config_cls, config_kwargs):
13941394
if not issubclass(config_cls, PromptLearningConfig):
13951395
return pytest.skip(f"Test not applicable for {config_cls}")
1396-
if ("gemma" in model_id.lower()) and (config_cls == PrefixTuningConfig):
1397-
# TODO might be caused by the 4d causal attention mask of gemma
1398-
return pytest.skip("Prefix tuning + gemma is currently failing")
13991396

14001397
with hub_online_once(model_id):
14011398
model = self.transformers_class.from_pretrained(model_id)

0 commit comments

Comments (0)