From 068286d1d0d35022dd6fb286e42978246a1d3a85 Mon Sep 17 00:00:00 2001
From: calpt <36051308+calpt@users.noreply.github.com>
Date: Wed, 23 Nov 2022 12:40:51 +0100
Subject: [PATCH] Re-enable repo consistency checks

---
 .github/workflows/tests_torch.yml             | 3 ++-
 Makefile                                      | 2 +-
 src/transformers/models/beit/modeling_beit.py | 3 ++-
 utils/check_adapters.py                       | 2 +-
 utils/check_config_docstrings.py              | 2 +-
 utils/check_inits.py                          | 5 +++--
 6 files changed, 10 insertions(+), 7 deletions(-)

diff --git a/.github/workflows/tests_torch.yml b/.github/workflows/tests_torch.yml
index 6ce9485fc6..d9ecb9a660 100644
--- a/.github/workflows/tests_torch.yml
+++ b/.github/workflows/tests_torch.yml
@@ -37,9 +37,10 @@ jobs:
         run: |
           pip install torch==1.12.1
           pip install .[quality]
-      - name: Check Quality
+      - name: Check Quality and Repo Consistency
         run: |
           make quality
+          make repo-consistency
   run_reduced_tests_torch:
     timeout-minutes: 60
     runs-on: ubuntu-latest
diff --git a/Makefile b/Makefile
index aa8d05ed6b..41d6a3d9b3 100644
--- a/Makefile
+++ b/Makefile
@@ -37,11 +37,11 @@ autogenerate_code: deps_table_update
 #	python utils/check_copies.py
 #	python utils/check_table.py
 #	python utils/check_dummies.py
+#	python utils/tests_fetcher.py --sanity_check
 repo-consistency:
 	python utils/check_repo.py
 	python utils/check_inits.py
 	python utils/check_config_docstrings.py
-	python utils/tests_fetcher.py --sanity_check
 	python utils/check_adapters.py
 
 # this target runs checks on all files
diff --git a/src/transformers/models/beit/modeling_beit.py b/src/transformers/models/beit/modeling_beit.py
index 07745f85ae..ea8903773b 100755
--- a/src/transformers/models/beit/modeling_beit.py
+++ b/src/transformers/models/beit/modeling_beit.py
@@ -29,6 +29,7 @@
 from ...adapters.context import ForwardContext
 from ...adapters.lora import Linear as LoRALinear
 from ...adapters.mixins.beit import BeitLayerAdaptersMixin, BeitModelAdaptersMixin, BeitModelWithHeadsAdaptersMixin
+from ...adapters.model_mixin import ModelWithHeadsAdaptersMixin
 from ...adapters.prefix_tuning import PrefixTuningShim
 from ...modeling_outputs import (
     BaseModelOutput,
@@ -1176,7 +1177,7 @@ def forward(self, encoder_hidden_states: torch.Tensor) -> torch.Tensor:
     """,
     BEIT_START_DOCSTRING,
 )
-class BeitForSemanticSegmentation(BeitPreTrainedModel):
+class BeitForSemanticSegmentation(ModelWithHeadsAdaptersMixin, BeitPreTrainedModel):
     def __init__(self, config: BeitConfig) -> None:
         super().__init__(config)
 
diff --git a/utils/check_adapters.py b/utils/check_adapters.py
index bd009be110..5e47218b20 100644
--- a/utils/check_adapters.py
+++ b/utils/check_adapters.py
@@ -16,7 +16,7 @@
     "encoder_decoder",
     "t5",
     "deberta",
-    "deberta-v2",
+    "deberta_v2",
     "vit",
 ]
 
diff --git a/utils/check_config_docstrings.py b/utils/check_config_docstrings.py
index bcbbace39e..456d888882 100644
--- a/utils/check_config_docstrings.py
+++ b/utils/check_config_docstrings.py
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import importlib
+import importlib.util
 import inspect
 import os
 import re
diff --git a/utils/check_inits.py b/utils/check_inits.py
index 98d4caf010..600633592b 100644
--- a/utils/check_inits.py
+++ b/utils/check_inits.py
@@ -61,7 +61,7 @@ def parse_init(init_file):
     Read an init_file and parse (per backend) the _import_structure objects defined and the TYPE_CHECKING objects
     defined
     """
-    with open(init_file, "r", encoding="utf-8", newline="\n") as f:
+    with open(init_file, "r", encoding="utf-8") as f:
         lines = f.readlines()
 
     line_index = 0
@@ -296,4 +296,5 @@ def check_submodules():
 
 if __name__ == "__main__":
     check_all_inits()
-    check_submodules()
+    # For AH: adapter submodules are not all registered in the main init of Transformers.
+    # check_submodules()
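
A note on the modeling_beit.py change above, as an illustrative sketch (not part of the patch): the adapters mixin is listed before the pretrained base class, so Python's method resolution order consults the mixin's overrides first. A minimal, self-contained example of that ordering rule; all class and method names below are illustrative, not taken from the patched file:

    class PreTrainedBase:
        def save_pretrained(self, path):
            return f"base saved to {path}"

    class AdaptersMixin:
        # Listed first among the bases below, so this override is found
        # before PreTrainedBase's in the MRO; super() still reaches the base.
        def save_pretrained(self, path):
            return f"mixin hook, then {super().save_pretrained(path)}"

    class Model(AdaptersMixin, PreTrainedBase):
        pass

    print(Model().save_pretrained("out/"))  # mixin hook, then base saved to out/
    print([c.__name__ for c in Model.__mro__])  # Model, AdaptersMixin, PreTrainedBase, object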
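
A second note, on the `import importlib` -> `import importlib.util` fix in utils/check_config_docstrings.py: importing the bare `importlib` package does not reliably bind its `util` submodule, so code that loads a module from a file path needs the explicit submodule import. A sketch of the pattern, assuming the src/transformers layout visible in the diff; the variable names here are illustrative:

    import importlib.util  # a bare `import importlib` may leave importlib.util unbound
    import os

    PATH_TO_REPO_PACKAGE = "src/transformers"

    # Build a spec pointing at the in-repo package so the check runs against
    # the working tree rather than an installed copy.
    spec = importlib.util.spec_from_file_location(
        "transformers",
        os.path.join(PATH_TO_REPO_PACKAGE, "__init__.py"),
        submodule_search_locations=[PATH_TO_REPO_PACKAGE],
    )
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)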