Re-enable repo consistency checks
calpt committed Nov 23, 2022
1 parent 707daf9 commit 068286d
Showing 6 changed files with 10 additions and 7 deletions.

.github/workflows/tests_torch.yml (3 changes: 2 additions & 1 deletion)

@@ -37,9 +37,10 @@ jobs:
         run: |
           pip install torch==1.12.1
           pip install .[quality]
-      - name: Check Quality
+      - name: Check Quality and Repo Consistency
         run: |
           make quality
+          make repo-consistency
   run_reduced_tests_torch:
     timeout-minutes: 60
     runs-on: ubuntu-latest
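With this change, the CI quality step runs make repo-consistency alongside make quality, so the consistency checks listed in the Makefile below gate the build again.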

Makefile (2 changes: 1 addition & 1 deletion)

@@ -37,11 +37,11 @@ autogenerate_code: deps_table_update
 #	python utils/check_copies.py
 #	python utils/check_table.py
 #	python utils/check_dummies.py
-#	python utils/tests_fetcher.py --sanity_check
 repo-consistency:
 	python utils/check_repo.py
 	python utils/check_inits.py
 	python utils/check_config_docstrings.py
+	python utils/tests_fetcher.py --sanity_check
 	python utils/check_adapters.py

 # this target runs checks on all files
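The repo-consistency target now runs utils/tests_fetcher.py --sanity_check as an active step rather than a comment, while the upstream-only checks (check_copies, check_table, check_dummies) stay disabled. Running make repo-consistency locally reproduces exactly what CI executes.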

src/transformers/models/beit/modeling_beit.py (3 changes: 2 additions & 1 deletion)

@@ -29,6 +29,7 @@
 from ...adapters.context import ForwardContext
 from ...adapters.lora import Linear as LoRALinear
 from ...adapters.mixins.beit import BeitLayerAdaptersMixin, BeitModelAdaptersMixin, BeitModelWithHeadsAdaptersMixin
+from ...adapters.model_mixin import ModelWithHeadsAdaptersMixin
 from ...adapters.prefix_tuning import PrefixTuningShim
 from ...modeling_outputs import (
     BaseModelOutput,

@@ -1176,7 +1177,7 @@ def forward(self, encoder_hidden_states: torch.Tensor) -> torch.Tensor:
""",
BEIT_START_DOCSTRING,
)
class BeitForSemanticSegmentation(BeitPreTrainedModel):
class BeitForSemanticSegmentation(ModelWithHeadsAdaptersMixin, BeitPreTrainedModel):
def __init__(self, config: BeitConfig) -> None:
super().__init__(config)

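For context on the new base-class order: listing the mixin before BeitPreTrainedModel places it earlier in Python's method resolution order, so any method the mixin overrides is found before the base model's version. A minimal sketch with stand-in classes (illustrative names, not the real implementations):

    class Base:
        def save_pretrained(self):
            return "base behaviour"

    class AdaptersMixin:
        def save_pretrained(self):
            # Listed first among the bases, so the MRO picks this override.
            return "adapter-aware behaviour"

    class Model(AdaptersMixin, Base):
        pass

    print([c.__name__ for c in Model.__mro__])  # ['Model', 'AdaptersMixin', 'Base', 'object']
    print(Model().save_pretrained())            # adapter-aware behaviour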

utils/check_adapters.py (2 changes: 1 addition & 1 deletion)

@@ -16,7 +16,7 @@
"encoder_decoder",
"t5",
"deberta",
"deberta-v2",
"deberta_v2",
"vit",
]

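The list entry changes from "deberta-v2" to "deberta_v2", matching the underscore naming of the package directories under transformers/models/. A minimal sketch of why the hyphenated form cannot work as a module path (this assumes the entries are resolved to modules; the rest of check_adapters.py is not shown in this diff):

    import importlib

    # Package directories use underscores: transformers/models/deberta_v2/
    print(importlib.import_module("transformers.models.deberta_v2").__name__)

    # A hyphenated name is not a valid module path and fails to resolve.
    try:
        importlib.import_module("transformers.models.deberta-v2")
    except ModuleNotFoundError as err:
        print(err)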

utils/check_config_docstrings.py (2 changes: 1 addition & 1 deletion)

@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import importlib
+import importlib.util
 import inspect
 import os
 import re
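Importing a package does not automatically bind its submodules, so after a bare "import importlib" the attribute importlib.util is only present if some other module happens to have imported it already; the explicit import makes the dependency reliable. A minimal sketch of the pattern (hypothetical helper, not taken from check_config_docstrings.py):

    import importlib.util  # explicit submodule import, as in the diff

    def module_available(name: str) -> bool:
        # find_spec returns None when the module cannot be located.
        return importlib.util.find_spec(name) is not None

    print(module_available("inspect"))         # True
    print(module_available("no_such_module"))  # False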

utils/check_inits.py (5 changes: 3 additions & 2 deletions)

@@ -61,7 +61,7 @@ def parse_init(init_file):
     Read an init_file and parse (per backend) the _import_structure objects defined and the TYPE_CHECKING objects
     defined
     """
-    with open(init_file, "r", encoding="utf-8", newline="\n") as f:
+    with open(init_file, "r", encoding="utf-8") as f:
         lines = f.readlines()

     line_index = 0
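Dropping newline="\n" restores universal newline translation: in text mode, "\r\n" endings are converted to "\n" on read, so parse_init sees uniform line endings regardless of how a checkout was made. A minimal sketch of the difference:

    import tempfile

    with tempfile.NamedTemporaryFile("wb", delete=False) as f:
        f.write(b"from . import util\r\n")
        path = f.name

    with open(path, "r", encoding="utf-8", newline="\n") as f:
        print(repr(f.read()))  # 'from . import util\r\n' -- \r kept

    with open(path, "r", encoding="utf-8") as f:  # as in the diff
        print(repr(f.read()))  # 'from . import util\n'   -- translated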

@@ -296,4 +296,5 @@ def check_submodules():

 if __name__ == "__main__":
     check_all_inits()
-    check_submodules()
+    # For AH: adapter submodules are not all registered in the main init of Transformers.
+    # check_submodules()
