From 17619e80e74ac173b4462746937e639497b0f07d Mon Sep 17 00:00:00 2001
From: Sylvain Gugger
Date: Mon, 3 May 2021 20:08:35 -0400
Subject: [PATCH 1/7] Make quality scripts work when one backend is missing.

---
 utils/check_repo.py | 38 +++++++++++++++++++++++++++++---------
 1 file changed, 29 insertions(+), 9 deletions(-)

diff --git a/utils/check_repo.py b/utils/check_repo.py
index 019a30893db5c5..aa15ea3ed36dce 100644
--- a/utils/check_repo.py
+++ b/utils/check_repo.py
@@ -17,8 +17,10 @@
 import inspect
 import os
 import re
+import warnings
 from pathlib import Path
 
+from transformers import is_flax_available, is_tf_available, is_torch_available
 from transformers.models.auto import get_values
 
 
@@ -250,15 +252,18 @@ def check_all_models_are_tested():
 
 def get_all_auto_configured_models():
     """Return the list of all models in at least one auto class."""
    result = set()  # To avoid duplicates we concatenate all model classes in a set.
-    for attr_name in dir(transformers.models.auto.modeling_auto):
-        if attr_name.startswith("MODEL_") and attr_name.endswith("MAPPING"):
-            result = result | set(get_values(getattr(transformers.models.auto.modeling_auto, attr_name)))
-    for attr_name in dir(transformers.models.auto.modeling_tf_auto):
-        if attr_name.startswith("TF_MODEL_") and attr_name.endswith("MAPPING"):
-            result = result | set(get_values(getattr(transformers.models.auto.modeling_tf_auto, attr_name)))
-    for attr_name in dir(transformers.models.auto.modeling_flax_auto):
-        if attr_name.startswith("FLAX_MODEL_") and attr_name.endswith("MAPPING"):
-            result = result | set(get_values(getattr(transformers.models.auto.modeling_flax_auto, attr_name)))
+    if is_torch_available():
+        for attr_name in dir(transformers.models.auto.modeling_auto):
+            if attr_name.startswith("MODEL_") and attr_name.endswith("MAPPING"):
+                result = result | set(get_values(getattr(transformers.models.auto.modeling_auto, attr_name)))
+    if is_tf_available():
+        for attr_name in dir(transformers.models.auto.modeling_tf_auto):
+            if attr_name.startswith("TF_MODEL_") and attr_name.endswith("MAPPING"):
+                result = result | set(get_values(getattr(transformers.models.auto.modeling_tf_auto, attr_name)))
+    if is_flax_available():
+        for attr_name in dir(transformers.models.auto.modeling_flax_auto):
+            if attr_name.startswith("FLAX_MODEL_") and attr_name.endswith("MAPPING"):
+                result = result | set(get_values(getattr(transformers.models.auto.modeling_flax_auto, attr_name)))
     return [cls.__name__ for cls in result]
 
 
@@ -289,6 +294,21 @@ def check_models_are_auto_configured(module, all_auto_models):
 
 def check_all_models_are_auto_configured():
     """Check all models are each in an auto class."""
+    missing_backends = []
+    if not is_torch_available():
+        missing_backends.append("PyTorch")
+    if not is_tf_available():
+        missing_backends.append("TensorFlow")
+    if not is_flax_available():
+        missing_backends.append("Flax")
+    if len(missing_backends) > 0:
+        missing = ", ".join(missing_backends)
+        warnings.warn(
+            "Full quality checks require all backends to be installed (with `pip install -e .[dev]` in the "
+            f"Transformers repo, the following are missing: {missing}. While it's probably fine as long as you didn't "
+            "make any change in one of those backends modeling files, you should probably execute the command above "
+            "to be on the safe side."
+        )
     modules = get_model_modules()
     all_auto_models = get_all_auto_configured_models()
     failures = []
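PATCH 1/7 gates each backend walk behind an availability probe, so the script no longer crashes at import time when PyTorch, TensorFlow, or Flax is absent. A minimal standalone sketch of the same pattern, using only the standard library: `importlib.util.find_spec` stands in for `is_torch_available()` and friends, and `collect_mapping_names` is a hypothetical helper, not part of check_repo.py.

    import importlib
    import importlib.util


    def is_available(package: str) -> bool:
        """Probe for a package without importing it, like is_torch_available()."""
        return importlib.util.find_spec(package) is not None


    def collect_mapping_names(module_name: str, prefix: str) -> set:
        """Gather *_MAPPING attribute names, mirroring get_all_auto_configured_models."""
        module = importlib.import_module(module_name)
        return {
            attr
            for attr in dir(module)
            if attr.startswith(prefix) and attr.endswith("MAPPING")
        }


    result = set()
    # Only walk a backend's auto-modeling module when that backend is installed;
    # without the guard, the import below raises on a torch-free machine.
    if is_available("torch") and is_available("transformers"):
        result |= collect_mapping_names("transformers.models.auto.modeling_auto", "MODEL_")
    print(sorted(result))
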
From 5b5c4742090218c7447dc984885ad997b2138b04 Mon Sep 17 00:00:00 2001
From: Sylvain Gugger
Date: Tue, 4 May 2021 09:08:41 -0400
Subject: [PATCH 2/7] Check env variable is properly set

---
 utils/check_repo.py | 21 +++++++++++++++------
 1 file changed, 15 insertions(+), 6 deletions(-)

diff --git a/utils/check_repo.py b/utils/check_repo.py
index aa15ea3ed36dce..a94495eb90a854 100644
--- a/utils/check_repo.py
+++ b/utils/check_repo.py
@@ -21,6 +21,7 @@
 from pathlib import Path
 
 from transformers import is_flax_available, is_tf_available, is_torch_available
+from transformers.file_utils import ENV_VARS_TRUE_VALUES
 from transformers.models.auto import get_values
 
 
@@ -294,6 +295,8 @@ def check_models_are_auto_configured(module, all_auto_models):
 
 def check_all_models_are_auto_configured():
     """Check all models are each in an auto class."""
+    if os.getenv("TRANSFORMERS_IS_CI").upper() in ENV_VARS_TRUE_VALUES:
+        raise Exception("Variable is properly set.")
     missing_backends = []
     if not is_torch_available():
         missing_backends.append("PyTorch")
@@ -303,12 +306,18 @@ def check_all_models_are_auto_configured():
         missing_backends.append("Flax")
     if len(missing_backends) > 0:
         missing = ", ".join(missing_backends)
-        warnings.warn(
-            "Full quality checks require all backends to be installed (with `pip install -e .[dev]` in the "
-            f"Transformers repo, the following are missing: {missing}. While it's probably fine as long as you didn't "
-            "make any change in one of those backends modeling files, you should probably execute the command above "
-            "to be on the safe side."
-        )
+        if os.getenv("TRANSFORMERS_IS_CI").upper() in ENV_VARS_TRUE_VALUES:
+            raise Exception(
+                "Full quality checks require all backends to be installed (with `pip install -e .[dev]` in the "
+                f"Transformers repo, the following are missing: {missing}."
+            )
+        else:
+            warnings.warn(
+                "Full quality checks require all backends to be installed (with `pip install -e .[dev]` in the "
+                f"Transformers repo, the following are missing: {missing}. While it's probably fine as long as you "
+                "didn't make any change in one of those backends modeling files, you should probably execute the "
+                "command above to be on the safe side."
+            )
     modules = get_model_modules()
     all_auto_models = get_all_auto_configured_models()
     failures = []
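PATCH 2/7 hard-fails on CI but only warns on developer machines. Note the latent bug it also introduces: `os.getenv("TRANSFORMERS_IS_CI")` returns None when the variable is unset, so `.upper()` raises AttributeError on any machine that does not define it; PATCH 3/7 below adds the `""` default. A self-contained sketch of the final behavior, with `ENV_VARS_TRUE_VALUES` inlined under the assumption that it matches the set in transformers.file_utils:

    import os
    import warnings

    # Inlined for self-containment; the canonical set lives in transformers.file_utils.
    ENV_VARS_TRUE_VALUES = {"1", "ON", "YES", "TRUE"}


    def is_ci() -> bool:
        # The "" default matters: os.getenv returns None for unset variables,
        # and None.upper() raises AttributeError (the bug PATCH 3/7 fixes).
        return os.getenv("TRANSFORMERS_IS_CI", "").upper() in ENV_VARS_TRUE_VALUES


    def require_backends(missing_backends: list) -> None:
        """Hard-fail on CI, soft-warn on developer machines."""
        if not missing_backends:
            return
        message = f"Full quality checks require all backends; missing: {', '.join(missing_backends)}."
        if is_ci():
            raise Exception(message)
        warnings.warn(message + " Install them with `pip install -e .[dev]` to be safe.")


    require_backends(["TensorFlow", "Flax"])  # warns locally, raises when TRANSFORMERS_IS_CI=yes
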
From 0e464eda402085107ae41c1acbcd5d2000b32529 Mon Sep 17 00:00:00 2001
From: Sylvain Gugger
Date: Tue, 4 May 2021 09:13:55 -0400
Subject: [PATCH 3/7] Add default

---
 utils/check_repo.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/utils/check_repo.py b/utils/check_repo.py
index a94495eb90a854..a5966afd9270dc 100644
--- a/utils/check_repo.py
+++ b/utils/check_repo.py
@@ -295,7 +295,7 @@ def check_models_are_auto_configured(module, all_auto_models):
 
 def check_all_models_are_auto_configured():
     """Check all models are each in an auto class."""
-    if os.getenv("TRANSFORMERS_IS_CI").upper() in ENV_VARS_TRUE_VALUES:
+    if os.getenv("TRANSFORMERS_IS_CI", "").upper() in ENV_VARS_TRUE_VALUES:
         raise Exception("Variable is properly set.")
     missing_backends = []
     if not is_torch_available():
@@ -306,7 +306,7 @@ def check_all_models_are_auto_configured():
         missing_backends.append("Flax")
     if len(missing_backends) > 0:
         missing = ", ".join(missing_backends)
-        if os.getenv("TRANSFORMERS_IS_CI").upper() in ENV_VARS_TRUE_VALUES:
+        if os.getenv("TRANSFORMERS_IS_CI", "").upper() in ENV_VARS_TRUE_VALUES:
             raise Exception(
                 "Full quality checks require all backends to be installed (with `pip install -e .[dev]` in the "
                 f"Transformers repo, the following are missing: {missing}."

From 666b02cc4dfff5d088bda079bfc4130bb3f3c9a4 Mon Sep 17 00:00:00 2001
From: Sylvain Gugger
Date: Tue, 4 May 2021 09:29:59 -0400
Subject: [PATCH 4/7] With print statements

---
 utils/check_repo.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/utils/check_repo.py b/utils/check_repo.py
index a5966afd9270dc..7020e0beb4ddaa 100644
--- a/utils/check_repo.py
+++ b/utils/check_repo.py
@@ -295,6 +295,8 @@ def check_models_are_auto_configured(module, all_auto_models):
 
 def check_all_models_are_auto_configured():
     """Check all models are each in an auto class."""
+    print(f"TRANFORMERS_IS_CI: {os.getenv("TRANSFORMERS_IS_CI", "").upper()}")
+    print(os.getenv("TRANSFORMERS_IS_CI", "").upper() in ENV_VARS_TRUE_VALUES)
     if os.getenv("TRANSFORMERS_IS_CI", "").upper() in ENV_VARS_TRUE_VALUES:
         raise Exception("Variable is properly set.")
     missing_backends = []

From 865f17c0a056ede9f8ee32fbeff1ee24a86f3be3 Mon Sep 17 00:00:00 2001
From: Sylvain Gugger
Date: Tue, 4 May 2021 09:32:58 -0400
Subject: [PATCH 5/7] Fix typo

---
 utils/check_repo.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/utils/check_repo.py b/utils/check_repo.py
index 7020e0beb4ddaa..37984be00836a4 100644
--- a/utils/check_repo.py
+++ b/utils/check_repo.py
@@ -295,7 +295,7 @@ def check_models_are_auto_configured(module, all_auto_models):
 
 def check_all_models_are_auto_configured():
     """Check all models are each in an auto class."""
-    print(f"TRANFORMERS_IS_CI: {os.getenv("TRANSFORMERS_IS_CI", "").upper()}")
+    print(f'TRANFORMERS_IS_CI: {os.getenv("TRANSFORMERS_IS_CI", "").upper()}')
     print(os.getenv("TRANSFORMERS_IS_CI", "").upper() in ENV_VARS_TRUE_VALUES)
     if os.getenv("TRANSFORMERS_IS_CI", "").upper() in ENV_VARS_TRUE_VALUES:
         raise Exception("Variable is properly set.")
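The debug detour in PATCHES 4/7 and 5/7 is worth a note: the line added in PATCH 4/7 is not merely ugly, it is a SyntaxError on the Python 3.6 CI image, because before Python 3.12 an f-string may not reuse its own quote character inside the replacement braces. PATCH 5/7's "Fix typo" swaps the outer quotes to single quotes; the misspelled `TRANFORMERS_IS_CI` label is left alone, since the whole print is deleted again in PATCH 7/7. A minimal illustration of the quoting rule:

    import os

    # Broken on Python < 3.12: the inner double quotes terminate the f-string early.
    #   print(f"TRANFORMERS_IS_CI: {os.getenv("TRANSFORMERS_IS_CI", "").upper()}")
    # PATCH 5/7's fix: use single quotes for the f-string itself.
    print(f'TRANFORMERS_IS_CI: {os.getenv("TRANSFORMERS_IS_CI", "").upper()}')
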
From a6fd62453a08367537146ac62860e45619fec854 Mon Sep 17 00:00:00 2001
From: Sylvain Gugger
Date: Tue, 4 May 2021 09:37:08 -0400
Subject: [PATCH 6/7] Set env variable

---
 .circleci/config.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 42b536792f9e6a..5e90d8d5461b8a 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -391,6 +391,8 @@ jobs:
     docker:
       - image: circleci/python:3.6
     resource_class: medium
+    environment:
+      TRANSFORMERS_IS_CI: yes
     parallelism: 1
     steps:
       - checkout

From 3b656b86bc782fa49a8662d2a795dd063e03561d Mon Sep 17 00:00:00 2001
From: Sylvain Gugger
Date: Tue, 4 May 2021 09:40:55 -0400
Subject: [PATCH 7/7] Remove debug code

---
 utils/check_repo.py | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/utils/check_repo.py b/utils/check_repo.py
index 37984be00836a4..c368ddd5b2e109 100644
--- a/utils/check_repo.py
+++ b/utils/check_repo.py
@@ -295,10 +295,6 @@ def check_models_are_auto_configured(module, all_auto_models):
 
 def check_all_models_are_auto_configured():
     """Check all models are each in an auto class."""
-    print(f'TRANFORMERS_IS_CI: {os.getenv("TRANSFORMERS_IS_CI", "").upper()}')
-    print(os.getenv("TRANSFORMERS_IS_CI", "").upper() in ENV_VARS_TRUE_VALUES)
-    if os.getenv("TRANSFORMERS_IS_CI", "").upper() in ENV_VARS_TRUE_VALUES:
-        raise Exception("Variable is properly set.")
     missing_backends = []
     if not is_torch_available():
         missing_backends.append("PyTorch")
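After PATCHES 6/7 and 7/7, the net behavior is: CircleCI exports `TRANSFORMERS_IS_CI`, and `check_repo.py` raises instead of warning when a backend is missing there. One hedged caveat: the bare `yes` in the YAML is a YAML 1.1 boolean, so depending on how the CI parses the config the job may see the string "true" rather than "yes"; either spelling matches, since the truthy set covers both. A short simulation, with `ENV_VARS_TRUE_VALUES` again inlined under the assumption it mirrors transformers.file_utils:

    import os

    ENV_VARS_TRUE_VALUES = {"1", "ON", "YES", "TRUE"}  # assumed to mirror transformers.file_utils

    # Exercise the final check with the values the CI job might plausibly see.
    for simulated in ("yes", "true", "1", "ON", ""):
        os.environ["TRANSFORMERS_IS_CI"] = simulated
        strict = os.getenv("TRANSFORMERS_IS_CI", "").upper() in ENV_VARS_TRUE_VALUES
        print(f"TRANSFORMERS_IS_CI={simulated!r} -> strict CI mode: {strict}")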