Skip BloomEmbeddingTest.test_embeddings for PyTorch < 1.10 (huggingface#19261)

Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
2 people authored and ajsanjoaquin committed Oct 12, 2022
1 parent 3b13064 commit 3fea093
Showing 1 changed file with 7 additions and 1 deletion.
8 changes: 7 additions & 1 deletion tests/models/bloom/test_modeling_bloom.py

@@ -37,6 +37,7 @@
     BloomModel,
     BloomTokenizerFast,
 )
+from transformers.pytorch_utils import is_torch_greater_or_equal_than_1_10


 @require_torch
@@ -500,9 +501,14 @@ def setUp(self):
         super().setUp()
         self.path_bigscience_model = "bigscience/bigscience-small-testing"

+    @unittest.skipIf(
+        not is_torch_available() or not is_torch_greater_or_equal_than_1_10,
+        "Test failed with torch < 1.10 (`LayerNormKernelImpl` not implemented for `BFloat16`)",
+    )
     @require_torch
     def test_embeddings(self):
-        model = BloomForCausalLM.from_pretrained(self.path_bigscience_model, torch_dtype="auto")  # load in fp32
+        # The config in this checkpoint has `bfloat16` as `torch_dtype` -> model in `bfloat16`
+        model = BloomForCausalLM.from_pretrained(self.path_bigscience_model, torch_dtype="auto")
         model.eval()

         EMBEDDINGS_DS_BEFORE_LN_BF_16_MEAN = {
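Note: the `is_torch_greater_or_equal_than_1_10` flag imported above is a module-level boolean computed once from the installed torch version. A minimal sketch of how such a flag can be derived, assuming the `packaging` library is available (the exact definition in `transformers.pytorch_utils` may differ):

import torch
from packaging import version

# Strip local/dev suffixes such as "+cu113" before comparing versions.
parsed_torch_version = version.parse(version.parse(torch.__version__).base_version)
is_torch_greater_or_equal_than_1_10 = parsed_torch_version >= version.parse("1.10")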

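For context, the skip reason quotes the failure this change works around: before torch 1.10, CPU `LayerNorm` had no `BFloat16` kernel, so loading this checkpoint in `bfloat16` (its configured `torch_dtype`, picked up by `torch_dtype="auto"`) and running it crashes. A minimal repro sketch of that failure mode:

import torch

# On torch < 1.10 this raises:
#   RuntimeError: "LayerNormKernelImpl" not implemented for 'BFloat16'
# On torch >= 1.10 it runs and returns a bfloat16 tensor.
layer_norm = torch.nn.LayerNorm(8).to(torch.bfloat16)
hidden = torch.randn(2, 8).to(torch.bfloat16)
print(layer_norm(hidden).dtype)  # torch.bfloat16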