From 30f7b2abe6991fe6e565f17f7d0701e80ecba0d3 Mon Sep 17 00:00:00 2001 From: Andrey Anshin Date: Mon, 4 Mar 2024 12:43:53 +0400 Subject: [PATCH] Avoid to use too broad `noqa` (#37862) --- airflow/api/common/experimental/delete_dag.py | 2 +- airflow/api/common/experimental/mark_tasks.py | 2 +- .../api/common/experimental/trigger_dag.py | 2 +- airflow/hooks/dbapi.py | 6 ++--- airflow/macros/__init__.py | 12 ++++----- airflow/models/dagparam.py | 2 +- airflow/models/dagrun.py | 2 +- airflow/providers/amazon/aws/hooks/appflow.py | 2 +- airflow/providers/amazon/aws/hooks/rds.py | 2 +- .../amazon/aws/hooks/redshift_data.py | 2 +- .../amazon/aws/hooks/verified_permissions.py | 2 +- .../kubernetes/operators/kubernetes_pod.py | 2 +- .../kubernetes/triggers/kubernetes_pod.py | 2 +- .../auth_manager/security_manager/override.py | 4 +-- .../providers/google/cloud/hooks/bigquery.py | 2 +- .../slack/notifications/slack_notifier.py | 2 +- .../snowflake/operators/snowflake.py | 2 +- airflow/serialization/pydantic/dag.py | 4 +-- airflow/serialization/pydantic/dag_run.py | 2 +- airflow/settings.py | 2 +- airflow/task/task_runner/base_task_runner.py | 2 +- airflow/utils/pydantic.py | 14 ++++------ airflow/utils/yaml.py | 2 +- dev/breeze/src/airflow_breeze/breeze.py | 26 +++++++++---------- .../airflow_breeze/configure_rich_click.py | 2 +- .../src/airflow_breeze/utils/click_utils.py | 2 +- pyproject.toml | 1 + tests/dags/subdir1/test_ignore_this.py | 2 +- tests/models/test_dagbag.py | 2 +- .../google/cloud/hooks/test_automl.py | 2 +- tests/test_utils/perf/perf_kit/memory.py | 2 +- 31 files changed, 56 insertions(+), 59 deletions(-) diff --git a/airflow/api/common/experimental/delete_dag.py b/airflow/api/common/experimental/delete_dag.py index 42dd8a9048a47..2dddd1265bbfe 100644 --- a/airflow/api/common/experimental/delete_dag.py +++ b/airflow/api/common/experimental/delete_dag.py @@ -20,7 +20,7 @@ import warnings -from airflow.api.common.delete_dag import * # noqa +from airflow.api.common.delete_dag import * # noqa: F403 warnings.warn( "This module is deprecated. Please use `airflow.api.common.delete_dag` instead.", diff --git a/airflow/api/common/experimental/mark_tasks.py b/airflow/api/common/experimental/mark_tasks.py index 7341e00934b32..97e56f313dd6d 100644 --- a/airflow/api/common/experimental/mark_tasks.py +++ b/airflow/api/common/experimental/mark_tasks.py @@ -20,7 +20,7 @@ import warnings -from airflow.api.common.mark_tasks import ( # noqa +from airflow.api.common.mark_tasks import ( # noqa: F401 _create_dagruns, set_dag_run_state_to_failed, set_dag_run_state_to_running, diff --git a/airflow/api/common/experimental/trigger_dag.py b/airflow/api/common/experimental/trigger_dag.py index 4bcbf11fcbce9..8e9d0e146219b 100644 --- a/airflow/api/common/experimental/trigger_dag.py +++ b/airflow/api/common/experimental/trigger_dag.py @@ -20,7 +20,7 @@ import warnings -from airflow.api.common.trigger_dag import * # noqa +from airflow.api.common.trigger_dag import * # noqa: F403 warnings.warn( "This module is deprecated. 
Please use `airflow.api.common.trigger_dag` instead.", diff --git a/airflow/hooks/dbapi.py b/airflow/hooks/dbapi.py index f3f28c40e0216..eb59f3268d86c 100644 --- a/airflow/hooks/dbapi.py +++ b/airflow/hooks/dbapi.py @@ -21,9 +21,9 @@ import warnings from airflow.exceptions import RemovedInAirflow3Warning -from airflow.providers.common.sql.hooks.sql import ( - ConnectorProtocol, # noqa - DbApiHook, # noqa +from airflow.providers.common.sql.hooks.sql import ( # noqa: F401 + ConnectorProtocol, + DbApiHook, ) warnings.warn( diff --git a/airflow/macros/__init__.py b/airflow/macros/__init__.py index ee22a47dc631d..cf0e5a3ef987e 100644 --- a/airflow/macros/__init__.py +++ b/airflow/macros/__init__.py @@ -17,16 +17,16 @@ # under the License. from __future__ import annotations -import json # noqa -import time # noqa -import uuid # noqa +import json # noqa: F401 +import time # noqa: F401 +import uuid # noqa: F401 from datetime import datetime, timedelta -from random import random # noqa +from random import random # noqa: F401 from typing import TYPE_CHECKING, Any -import dateutil # noqa +import dateutil # noqa: F401 -import airflow.utils.yaml as yaml # noqa +import airflow.utils.yaml as yaml # noqa: F401 from airflow.utils.deprecation_tools import add_deprecated_classes if TYPE_CHECKING: diff --git a/airflow/models/dagparam.py b/airflow/models/dagparam.py index f20bd078c80a6..9b06151966413 100644 --- a/airflow/models/dagparam.py +++ b/airflow/models/dagparam.py @@ -20,7 +20,7 @@ import warnings from airflow.exceptions import RemovedInAirflow3Warning -from airflow.models.param import DagParam # noqa +from airflow.models.param import DagParam # noqa: F401 warnings.warn( "This module is deprecated. Please use `airflow.models.param`.", diff --git a/airflow/models/dagrun.py b/airflow/models/dagrun.py index f9126dd6313bf..ec317a6070a7e 100644 --- a/airflow/models/dagrun.py +++ b/airflow/models/dagrun.py @@ -1421,7 +1421,7 @@ def get_run(session: Session, dag_id: str, execution_date: datetime) -> DagRun | return session.scalar( select(DagRun).where( DagRun.dag_id == dag_id, - DagRun.external_trigger == False, # noqa + DagRun.external_trigger == False, # noqa: E712 DagRun.execution_date == execution_date, ) ) diff --git a/airflow/providers/amazon/aws/hooks/appflow.py b/airflow/providers/amazon/aws/hooks/appflow.py index f60b5eea1aba5..5ef994917926b 100644 --- a/airflow/providers/amazon/aws/hooks/appflow.py +++ b/airflow/providers/amazon/aws/hooks/appflow.py @@ -22,7 +22,7 @@ from airflow.providers.amazon.aws.utils.waiter_with_logging import wait if TYPE_CHECKING: - from mypy_boto3_appflow.client import AppflowClient # noqa + from mypy_boto3_appflow.client import AppflowClient # noqa: F401 class AppflowHook(AwsGenericHook["AppflowClient"]): diff --git a/airflow/providers/amazon/aws/hooks/rds.py b/airflow/providers/amazon/aws/hooks/rds.py index 1b84ff018f765..9232fb717775d 100644 --- a/airflow/providers/amazon/aws/hooks/rds.py +++ b/airflow/providers/amazon/aws/hooks/rds.py @@ -26,7 +26,7 @@ from airflow.providers.amazon.aws.utils.waiter_with_logging import wait if TYPE_CHECKING: - from mypy_boto3_rds import RDSClient # noqa + from mypy_boto3_rds import RDSClient # noqa: F401 class RdsHook(AwsGenericHook["RDSClient"]): diff --git a/airflow/providers/amazon/aws/hooks/redshift_data.py b/airflow/providers/amazon/aws/hooks/redshift_data.py index 538e5cee96909..24bc3eace210c 100644 --- a/airflow/providers/amazon/aws/hooks/redshift_data.py +++ b/airflow/providers/amazon/aws/hooks/redshift_data.py @@ -25,7 +25,7 @@ 
from airflow.providers.amazon.aws.utils import trim_none_values if TYPE_CHECKING: - from mypy_boto3_redshift_data import RedshiftDataAPIServiceClient # noqa + from mypy_boto3_redshift_data import RedshiftDataAPIServiceClient # noqa: F401 from mypy_boto3_redshift_data.type_defs import DescribeStatementResponseTypeDef FINISHED_STATE = "FINISHED" diff --git a/airflow/providers/amazon/aws/hooks/verified_permissions.py b/airflow/providers/amazon/aws/hooks/verified_permissions.py index 8c4bb7e90c911..130976758b61e 100644 --- a/airflow/providers/amazon/aws/hooks/verified_permissions.py +++ b/airflow/providers/amazon/aws/hooks/verified_permissions.py @@ -21,7 +21,7 @@ from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook if TYPE_CHECKING: - from mypy_boto3_verifiedpermissions.client import VerifiedPermissionsClient # noqa + from mypy_boto3_verifiedpermissions.client import VerifiedPermissionsClient # noqa: F401 class VerifiedPermissionsHook(AwsGenericHook["VerifiedPermissionsClient"]): diff --git a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py index 36993895bc311..f415d897d2f74 100644 --- a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py +++ b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py @@ -21,7 +21,7 @@ import warnings from airflow.exceptions import AirflowProviderDeprecationWarning -from airflow.providers.cncf.kubernetes.operators.pod import * # noqa +from airflow.providers.cncf.kubernetes.operators.pod import * # noqa: F403 warnings.warn( "This module is deprecated. Please use `airflow.providers.cncf.kubernetes.operators.pod` instead.", diff --git a/airflow/providers/cncf/kubernetes/triggers/kubernetes_pod.py b/airflow/providers/cncf/kubernetes/triggers/kubernetes_pod.py index 3cb60f7bd12af..d6b1bbcaa5855 100644 --- a/airflow/providers/cncf/kubernetes/triggers/kubernetes_pod.py +++ b/airflow/providers/cncf/kubernetes/triggers/kubernetes_pod.py @@ -21,7 +21,7 @@ import warnings from airflow.exceptions import AirflowProviderDeprecationWarning -from airflow.providers.cncf.kubernetes.triggers.pod import * # noqa +from airflow.providers.cncf.kubernetes.triggers.pod import * # noqa: F403 warnings.warn( "This module is deprecated. 
Please use `airflow.providers.cncf.kubernetes.triggers.pod` instead.", diff --git a/airflow/providers/fab/auth_manager/security_manager/override.py b/airflow/providers/fab/auth_manager/security_manager/override.py index fe3dfa382a0c2..1859646458efd 100644 --- a/airflow/providers/fab/auth_manager/security_manager/override.py +++ b/airflow/providers/fab/auth_manager/security_manager/override.py @@ -1249,8 +1249,8 @@ def clean_perms(self) -> None: sesh = self.appbuilder.get_session perms = sesh.query(Permission).filter( or_( - Permission.action == None, # noqa - Permission.resource == None, # noqa + Permission.action == None, # noqa: E711 + Permission.resource == None, # noqa: E711 ) ) # Since FAB doesn't define ON DELETE CASCADE on these tables, we need diff --git a/airflow/providers/google/cloud/hooks/bigquery.py b/airflow/providers/google/cloud/hooks/bigquery.py index 2b252f3c3f43d..06e59c2b20f3f 100644 --- a/airflow/providers/google/cloud/hooks/bigquery.py +++ b/airflow/providers/google/cloud/hooks/bigquery.py @@ -49,7 +49,7 @@ from google.cloud.exceptions import NotFound from googleapiclient.discovery import Resource, build from pandas_gbq import read_gbq -from pandas_gbq.gbq import GbqConnector # noqa +from pandas_gbq.gbq import GbqConnector # noqa: F401 Used in ``airflow.contrib.hooks.bigquery`` from requests import Session from sqlalchemy import create_engine diff --git a/airflow/providers/slack/notifications/slack_notifier.py b/airflow/providers/slack/notifications/slack_notifier.py index 957d65e7e7981..df8e41523055a 100644 --- a/airflow/providers/slack/notifications/slack_notifier.py +++ b/airflow/providers/slack/notifications/slack_notifier.py @@ -20,7 +20,7 @@ import warnings from airflow.exceptions import AirflowProviderDeprecationWarning -from airflow.providers.slack.notifications.slack import SlackNotifier # noqa +from airflow.providers.slack.notifications.slack import SlackNotifier # noqa: F401 warnings.warn( "This module is deprecated. Please use `airflow.providers.slack.notifications.slack`", diff --git a/airflow/providers/snowflake/operators/snowflake.py b/airflow/providers/snowflake/operators/snowflake.py index 2de40bc04db14..70b3dfda43472 100644 --- a/airflow/providers/snowflake/operators/snowflake.py +++ b/airflow/providers/snowflake/operators/snowflake.py @@ -449,7 +449,7 @@ class SnowflakeSqlApiOperator(SQLExecuteQueryOperator): When executing the statement, Snowflake replaces placeholders (? and :name) in the statement with these specified values. :param deferrable: Run operator in the deferrable mode. 
- """ # noqa + """ # noqa: D205, D400 LIFETIME = timedelta(minutes=59) # The tokens will have a 59 minutes lifetime RENEWAL_DELTA = timedelta(minutes=54) # Tokens will be renewed after 54 minutes diff --git a/airflow/serialization/pydantic/dag.py b/airflow/serialization/pydantic/dag.py index 03a49f5663dbe..1046f0b16a8e9 100644 --- a/airflow/serialization/pydantic/dag.py +++ b/airflow/serialization/pydantic/dag.py @@ -125,8 +125,8 @@ class DagModelPydantic(BaseModelPydantic): default_view: Optional[str] schedule_interval: Optional[PydanticInterval] timetable_description: Optional[str] - tags: List[DagTagPydantic] # noqa - dag_owner_links: List[DagOwnerAttributesPydantic] # noqa + tags: List[DagTagPydantic] # noqa: UP006 + dag_owner_links: List[DagOwnerAttributesPydantic] # noqa: UP006 parent_dag: Optional[PydanticDag] max_active_tasks: int diff --git a/airflow/serialization/pydantic/dag_run.py b/airflow/serialization/pydantic/dag_run.py index 3de072cf90fa3..65133ad50dbae 100644 --- a/airflow/serialization/pydantic/dag_run.py +++ b/airflow/serialization/pydantic/dag_run.py @@ -53,7 +53,7 @@ class DagRunPydantic(BaseModelPydantic): dag_hash: Optional[str] updated_at: Optional[datetime] dag: Optional[PydanticDag] - consumed_dataset_events: List[DatasetEventPydantic] # noqa + consumed_dataset_events: List[DatasetEventPydantic] # noqa: UP006 log_template_id: Optional[int] model_config = ConfigDict(from_attributes=True, arbitrary_types_allowed=True) diff --git a/airflow/settings.py b/airflow/settings.py index 7356b8fac6e81..6cbf6126e09dc 100644 --- a/airflow/settings.py +++ b/airflow/settings.py @@ -32,7 +32,7 @@ from sqlalchemy.pool import NullPool from airflow import policies -from airflow.configuration import AIRFLOW_HOME, WEBSERVER_CONFIG, conf # NOQA F401 +from airflow.configuration import AIRFLOW_HOME, WEBSERVER_CONFIG, conf # noqa: F401 from airflow.exceptions import RemovedInAirflow3Warning from airflow.executors import executor_constants from airflow.logging_config import configure_logging diff --git a/airflow/task/task_runner/base_task_runner.py b/airflow/task/task_runner/base_task_runner.py index 979a547d3d447..a0029ebc5064d 100644 --- a/airflow/task/task_runner/base_task_runner.py +++ b/airflow/task/task_runner/base_task_runner.py @@ -27,7 +27,7 @@ if not IS_WINDOWS: # ignored to avoid flake complaining on Linux - from pwd import getpwnam # noqa + from pwd import getpwnam # noqa: F401 from typing import TYPE_CHECKING diff --git a/airflow/utils/pydantic.py b/airflow/utils/pydantic.py index 13ab7911663f2..85fde06195362 100644 --- a/airflow/utils/pydantic.py +++ b/airflow/utils/pydantic.py @@ -40,26 +40,22 @@ def is_pydantic_2_installed() -> bool: from pydantic import BaseModel, ConfigDict, PlainSerializer, PlainValidator, ValidationInfo else: - class BaseModel: # type: ignore[no-redef] # noqa + class BaseModel: # type: ignore[no-redef] # noqa: D101 def __init__(self, *args, **kwargs): pass - class ConfigDict: # type: ignore[no-redef] # noqa + class ConfigDict: # type: ignore[no-redef] # noqa: D101 def __init__(self, *args, **kwargs): pass - class PlainSerializer: # type: ignore[no-redef] # noqa + class PlainSerializer: # type: ignore[no-redef] # noqa: D101 def __init__(self, *args, **kwargs): pass - class PlainSerializer: # type: ignore[no-redef] # noqa + class PlainValidator: # type: ignore[no-redef] # noqa: D101 def __init__(self, *args, **kwargs): pass - class PlainValidator: # type: ignore[no-redef] # noqa - def __init__(self, *args, **kwargs): - pass - - class ValidationInfo: # 
type: ignore[no-redef] # noqa + class ValidationInfo: # type: ignore[no-redef] # noqa: D101 def __init__(self, *args, **kwargs): pass diff --git a/airflow/utils/yaml.py b/airflow/utils/yaml.py index 43201405f8958..28e117fcd5877 100644 --- a/airflow/utils/yaml.py +++ b/airflow/utils/yaml.py @@ -30,7 +30,7 @@ from typing import TYPE_CHECKING, Any, BinaryIO, TextIO, cast if TYPE_CHECKING: - from yaml.error import MarkedYAMLError, YAMLError # noqa + from yaml.error import MarkedYAMLError, YAMLError # noqa: F401 def safe_load(stream: bytes | str | BinaryIO | TextIO) -> Any: diff --git a/dev/breeze/src/airflow_breeze/breeze.py b/dev/breeze/src/airflow_breeze/breeze.py index b0b8879303b88..4336c4cfd6a59 100755 --- a/dev/breeze/src/airflow_breeze/breeze.py +++ b/dev/breeze/src/airflow_breeze/breeze.py @@ -23,24 +23,24 @@ find_airflow_sources_root_to_operate_on, ) -from airflow_breeze.configure_rich_click import click # isort: skip # noqa +from airflow_breeze.configure_rich_click import click # isort: skip # noqa: F401 find_airflow_sources_root_to_operate_on() create_directories_and_files() -from airflow_breeze.commands import developer_commands # noqa -from airflow_breeze.commands.ci_commands import ci_group # noqa -from airflow_breeze.commands.ci_image_commands import ci_image # noqa -from airflow_breeze.commands.kubernetes_commands import kubernetes_group # noqa -from airflow_breeze.commands.production_image_commands import prod_image # noqa -from airflow_breeze.commands.release_management_commands import release_management # noqa -from airflow_breeze.commands.minor_release_command import create_minor_version_branch # noqa -from airflow_breeze.commands.release_command import airflow_release # noqa -from airflow_breeze.commands.release_candidate_command import release_management # noqa -from airflow_breeze.commands.sbom_commands import sbom # noqa -from airflow_breeze.commands.setup_commands import setup # noqa -from airflow_breeze.commands.testing_commands import group_for_testing # noqa +from airflow_breeze.commands import developer_commands # noqa: I001, E402, F401 +from airflow_breeze.commands.ci_commands import ci_group # noqa: E402 +from airflow_breeze.commands.ci_image_commands import ci_image # noqa: E402 +from airflow_breeze.commands.kubernetes_commands import kubernetes_group # noqa: E402 +from airflow_breeze.commands.production_image_commands import prod_image # noqa: E402 +from airflow_breeze.commands.release_management_commands import release_management # noqa: E402 +from airflow_breeze.commands.minor_release_command import create_minor_version_branch # noqa: E402, F401 +from airflow_breeze.commands.release_command import airflow_release # noqa: E402, F401 +from airflow_breeze.commands.release_candidate_command import release_management # noqa: E402, F811 +from airflow_breeze.commands.sbom_commands import sbom # noqa: E402 +from airflow_breeze.commands.setup_commands import setup # noqa: E402 +from airflow_breeze.commands.testing_commands import group_for_testing # noqa: E402 main.add_command(group_for_testing) main.add_command(kubernetes_group) diff --git a/dev/breeze/src/airflow_breeze/configure_rich_click.py b/dev/breeze/src/airflow_breeze/configure_rich_click.py index 63f79d6777847..fc6ccb9d59084 100644 --- a/dev/breeze/src/airflow_breeze/configure_rich_click.py +++ b/dev/breeze/src/airflow_breeze/configure_rich_click.py @@ -18,7 +18,7 @@ from airflow_breeze.commands.sbom_commands_config import SBOM_COMMANDS, SBOM_PARAMETERS -from airflow_breeze.utils import recording # 
isort:skip # noqa +from airflow_breeze.utils import recording # isort:skip # noqa: F401 try: # We handle ImportError so that click autocomplete works diff --git a/dev/breeze/src/airflow_breeze/utils/click_utils.py b/dev/breeze/src/airflow_breeze/utils/click_utils.py index 46b7e8845fcd5..92d8d7b7cea15 100644 --- a/dev/breeze/src/airflow_breeze/utils/click_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/click_utils.py @@ -19,4 +19,4 @@ try: from rich_click import RichGroup as BreezeGroup except ImportError: - from click import Group as BreezeGroup # type: ignore[assignment] # noqa + from click import Group as BreezeGroup # type: ignore[assignment] # noqa: F401 diff --git a/pyproject.toml b/pyproject.toml index 2918a7ccf9596..9a7cd55114483 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1330,6 +1330,7 @@ extend-select = [ "D419", "TID251", # Specific modules or module members that may not be imported or accessed "TID253", # Ban certain modules from being imported at module level + "PGH004", # Use specific rule codes when using noqa "B006", # Checks for uses of mutable objects as function argument defaults. ] ignore = [ diff --git a/tests/dags/subdir1/test_ignore_this.py b/tests/dags/subdir1/test_ignore_this.py index da333748b98d5..e981dd3256d1d 100644 --- a/tests/dags/subdir1/test_ignore_this.py +++ b/tests/dags/subdir1/test_ignore_this.py @@ -18,6 +18,6 @@ from __future__ import annotations # needed to work against airflow "safe mode" parsing -from airflow.models import DAG # noqa +from airflow.models import DAG # noqa: F401 raise Exception("This dag file should have been ignored!") diff --git a/tests/models/test_dagbag.py b/tests/models/test_dagbag.py index 0599c9444f2fb..ea6d370c621e5 100644 --- a/tests/models/test_dagbag.py +++ b/tests/models/test_dagbag.py @@ -163,7 +163,7 @@ def create_dag(): def my_flow(): pass - my_dag = my_flow() # noqa + my_dag = my_flow() # noqa: F841 source_lines = [line[12:] for line in inspect.getsource(create_dag).splitlines(keepends=True)[1:]] path1 = tmp_path / "testfile1" diff --git a/tests/providers/google/cloud/hooks/test_automl.py b/tests/providers/google/cloud/hooks/test_automl.py index e774ba584afff..f79dd8b51b73d 100644 --- a/tests/providers/google/cloud/hooks/test_automl.py +++ b/tests/providers/google/cloud/hooks/test_automl.py @@ -71,7 +71,7 @@ def test_get_conn(self, mock_automl_client): @mock.patch("airflow.providers.google.cloud.hooks.automl.PredictionServiceClient") def test_prediction_client(self, mock_prediction_client): - client = self.hook.prediction_client # noqa + client = self.hook.prediction_client # noqa: F841 mock_prediction_client.assert_called_once_with(credentials=CREDENTIALS, client_info=CLIENT_INFO) @mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.create_model") diff --git a/tests/test_utils/perf/perf_kit/memory.py b/tests/test_utils/perf/perf_kit/memory.py index 6cef98a8ebac0..471ed1282aaf9 100644 --- a/tests/test_utils/perf/perf_kit/memory.py +++ b/tests/test_utils/perf/perf_kit/memory.py @@ -83,4 +83,4 @@ def trace_memory(human_readable=True, gc_collect=False): # Example: with trace_memory(): - import airflow # noqa + import airflow # noqa: F401