From f5e10007b7b0ec5345e015b9de7ffec72c5407fd Mon Sep 17 00:00:00 2001 From: SaulLu Date: Tue, 25 Jan 2022 19:13:16 +0100 Subject: [PATCH] remove `tokenizer_file` from herbert slow tokenizer init --- src/transformers/models/herbert/tokenization_herbert.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/transformers/models/herbert/tokenization_herbert.py b/src/transformers/models/herbert/tokenization_herbert.py index bd301ed7fe8fb7..2b8e832e652839 100644 --- a/src/transformers/models/herbert/tokenization_herbert.py +++ b/src/transformers/models/herbert/tokenization_herbert.py @@ -62,7 +62,6 @@ def __init__( self, vocab_file, merges_file, - tokenizer_file=None, cls_token="<s>", unk_token="<unk>", pad_token="<pad>", @@ -75,7 +74,6 @@ def __init__( super().__init__( vocab_file, merges_file, - tokenizer_file=None, cls_token=cls_token, unk_token=unk_token, pad_token=pad_token,