diff --git a/tests/test_tokenization_common.py b/tests/test_tokenization_common.py
index 7aa1bbf44397..f1f7afca62d7 100644
--- a/tests/test_tokenization_common.py
+++ b/tests/test_tokenization_common.py
@@ -69,13 +69,14 @@ def merge_model_tokenizer_mappings(
     model_tokenizer_mapping = OrderedDict([])
 
     for configuration in configurations:
-        model = model_mapping[configuration]
-        tokenizer = tokenizer_mapping[configuration][0]
-        tokenizer_fast = tokenizer_mapping[configuration][1]
-
-        model_tokenizer_mapping.update({tokenizer: (configuration, model)})
-        if tokenizer_fast is not None:
-            model_tokenizer_mapping.update({tokenizer_fast: (configuration, model)})
+        if configuration in model_mapping and configuration in tokenizer_mapping:
+            model = model_mapping[configuration]
+            tokenizer = tokenizer_mapping[configuration][0]
+            tokenizer_fast = tokenizer_mapping[configuration][1]
+
+            model_tokenizer_mapping.update({tokenizer: (configuration, model)})
+            if tokenizer_fast is not None:
+                model_tokenizer_mapping.update({tokenizer_fast: (configuration, model)})
 
     return model_tokenizer_mapping
 