Skip to content

Commit

Permalink
Add require_torch to tokenizer tests
Browse files — browse the repository at this point in the history
  • Loading branch information
NielsRogge committed Apr 26, 2021
1 parent 36dba81 commit c29e2f9
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 3 deletions.
4 changes: 2 additions & 2 deletions src/transformers/models/luke/modeling_luke.py
Original file line number Diff line number Diff line change
Expand Up @@ -689,7 +689,7 @@ class LukePreTrainedModel(PreTrainedModel):
base_model_prefix = "luke"

def _init_weights(self, module: nn.Module):
""" Initialize the weights """
"""Initialize the weights"""
if isinstance(module, nn.Linear):
module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
if module.bias is not None:
Expand Down Expand Up @@ -1074,7 +1074,7 @@ def forward(
>>> outputs = model(**inputs)
>>> logits = outputs.logits
>>> predicted_class_idx = logits.argmax(-1).item()
>>> print("Predicted class:", model.config.id2label[predicted_class_idx])
>>> print("Predicted class:", model.config.id2label[predicted_class_idx])
"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict

Expand Down
2 changes: 1 addition & 1 deletion tests/test_tokenization_luke.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
import unittest

from transformers import AddedToken, LukeTokenizer
from transformers.testing_utils import slow, require_torch
from transformers.testing_utils import require_torch, slow

from .test_tokenization_common import TokenizerTesterMixin

Expand Down

0 comments on commit c29e2f9

Please sign in to comment.