diff --git a/src/transformers/tokenization_utils_fast.py b/src/transformers/tokenization_utils_fast.py
index d6690dda560ebe..5684cad1e077f1 100644
--- a/src/transformers/tokenization_utils_fast.py
+++ b/src/transformers/tokenization_utils_fast.py
@@ -16,7 +16,6 @@
 Tokenization classes for fast tokenizers (provided by HuggingFace's tokenizers library). For slow (python) tokenizers
 see tokenization_utils.py
 """
-import copy
 import json
 import os
 from collections import defaultdict
@@ -105,7 +104,7 @@ def __init__(self, *args, **kwargs):
             )
 
         if tokenizer_object is not None:
-            fast_tokenizer = copy.deepcopy(tokenizer_object)
+            fast_tokenizer = tokenizer_object
         elif fast_tokenizer_file is not None and not from_slow:
             # We have a serialization from tokenizers which let us directly build the backend
             fast_tokenizer = TokenizerFast.from_file(fast_tokenizer_file)
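
The observable effect of this diff is that PreTrainedTokenizerFast now wraps the caller's tokenizers.Tokenizer directly instead of a deep copy, so the wrapper and the original object share state. A minimal sketch of that difference, assuming recent tokenizers and transformers releases; the toy WordLevel vocab and the variable names are illustrative, not taken from the PR:

    # Sketch only: demonstrates the shared-state behavior after this diff.
    from tokenizers import Tokenizer
    from tokenizers.models import WordLevel
    from transformers import PreTrainedTokenizerFast

    # A tiny tokenizers.Tokenizer to hand to the fast-tokenizer wrapper
    # (hypothetical vocab, chosen only for the example).
    backend = Tokenizer(WordLevel({"[UNK]": 0, "hello": 1, "world": 2}, unk_token="[UNK]"))

    wrapper = PreTrainedTokenizerFast(tokenizer_object=backend)

    # After this change both names refer to the same underlying Rust tokenizer;
    # with the old copy.deepcopy the wrapper held an independent copy.
    print(wrapper.backend_tokenizer is backend)  # True after this diff, False before

    # Consequently, mutating the object the caller passed in is now visible
    # through the wrapper as well.
    backend.add_tokens(["brand_new_token"])
    print(wrapper.backend_tokenizer.get_vocab_size())  # reflects the added token

The trade-off: skipping the deep copy avoids rebuilding the backend tokenizer on every construction, at the cost of shared mutable state between the caller's object and the wrapper.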