7 changes: 7 additions & 0 deletions .pre-commit-config.yaml
@@ -413,6 +413,13 @@ repos:
 ^airflow_breeze/templates/PROVIDER_README_TEMPLATE\.rst\.jinja2$
 additional_dependencies: ['rich>=12.4.4','requests>=2.31.0']
 require_serial: true
+- id: check-airflow-v-imports-in-tests
+  name: Check AIRFLOW_V imports in tests
+  language: python
+  entry: ./scripts/ci/pre_commit/check_airflow_v_imports_in_tests.py
+  pass_filenames: true
+  files: ^providers/.*/tests/.+\.py$
+  additional_dependencies: ['rich>=12.4.4']
 - id: ruff
   name: Run 'ruff' for extremely fast Python linting
   description: "Run 'ruff' for extremely fast Python linting"
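The hook's entry point, scripts/ci/pre_commit/check_airflow_v_imports_in_tests.py, is not part of this diff. A minimal sketch of what such a check might look like, assuming it simply scans the provider test files passed by pre-commit for AIRFLOW_V_* flags imported from a provider's version_compat module instead of tests_common (the real script also declares rich as a dependency, presumably for nicer console output, which this sketch omits):

```python
#!/usr/bin/env python
# Hypothetical sketch of the pre-commit check -- the actual script is not shown in this PR.
from __future__ import annotations

import re
import sys

# Provider tests should import AIRFLOW_V_* flags from tests_common.test_utils.version_compat,
# not from airflow.providers.<provider>.version_compat.
FORBIDDEN_IMPORT = re.compile(
    r"^from airflow\.providers\.[\w.]+\.version_compat import .*\bAIRFLOW_V_",
    re.MULTILINE,
)


def main() -> int:
    failed = False
    for path in sys.argv[1:]:
        with open(path, encoding="utf-8") as file:
            content = file.read()
        if FORBIDDEN_IMPORT.search(content):
            print(f"{path}: import AIRFLOW_V_* from tests_common.test_utils.version_compat instead")
            failed = True
    return 1 if failed else 0


if __name__ == "__main__":
    sys.exit(main())
```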
2 changes: 2 additions & 0 deletions contributing-docs/08_static_code_checks.rst
@@ -129,6 +129,8 @@ require Breeze Docker image to be built locally.
 +-----------------------------------------------------------+--------------------------------------------------------+---------+
 | check-airflow-providers-bug-report-template | Sort airflow-bug-report provider list | |
 +-----------------------------------------------------------+--------------------------------------------------------+---------+
+| check-airflow-v-imports-in-tests | Check AIRFLOW_V imports in tests | |
++-----------------------------------------------------------+--------------------------------------------------------+---------+
 | check-apache-license-rat | Check if licenses are OK for Apache | |
 +-----------------------------------------------------------+--------------------------------------------------------+---------+
 | check-base-operator-partial-arguments | Check BaseOperator and partial() arguments | |
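As with the other checks in this table, the new check can be run locally on demand, for example with `pre-commit run check-airflow-v-imports-in-tests --all-files`, or through Breeze's `static-checks` command (the exact invocation depends on your Breeze version).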
172 changes: 88 additions & 84 deletions dev/breeze/doc/images/output_static-checks.svg
2 changes: 1 addition & 1 deletion dev/breeze/doc/images/output_static-checks.txt
@@ -1 +1 @@
-4d21a8da51102b0c61707aac83f31b3a
+bfb0d23efe62297165a4aacc0cddcfb1
1 change: 1 addition & 0 deletions dev/breeze/src/airflow_breeze/pre_commit_ids.py
@@ -29,6 +29,7 @@
     "check-aiobotocore-optional",
     "check-airflow-k8s-not-used",
     "check-airflow-providers-bug-report-template",
+    "check-airflow-v-imports-in-tests",
     "check-apache-license-rat",
     "check-base-operator-partial-arguments",
     "check-base-operator-usage",
@@ -419,7 +419,7 @@ def set_up(env_id, role_arn):
     _install_aws_cli_if_needed()
     _build_and_upload_docker_image(preprocess_script, ecr_repository_uri)
 
-    from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
+    from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 
     if AIRFLOW_V_3_0_PLUS:
        from airflow.sdk import get_current_context
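This and all of the test changes below switch the source of AIRFLOW_V_3_0_PLUS from the per-provider version_compat modules to the shared tests_common.test_utils.version_compat helper. That helper is not shown in this diff; a minimal sketch of what such a module typically provides, assuming the flag is derived from the installed Airflow version:

```python
# Hypothetical sketch of tests_common/test_utils/version_compat.py -- the real module
# is not included in this diff; its contents here are an assumption.
from __future__ import annotations

from packaging.version import Version

from airflow import __version__ as airflow_version

# Compare against the base version so pre-release/dev suffixes (e.g. "3.0.0.dev0")
# do not change the result of the comparison.
AIRFLOW_VERSION = Version(Version(airflow_version).base_version)
AIRFLOW_V_3_0_PLUS = AIRFLOW_VERSION >= Version("3.0.0")
```

Keeping a single test-side helper means provider tests no longer need to reach into provider packages just to gate on the Airflow version, which is presumably what the new pre-commit check enforces.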
@@ -22,7 +22,7 @@
 import boto3
 import pytest
 
-from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 
 if not AIRFLOW_V_3_0_PLUS:
     pytest.skip("AWS auth manager is only compatible with Airflow >= 3.0.0", allow_module_level=True)
@@ -31,10 +31,11 @@
 from airflow.decorators import task
 from airflow.providers.amazon.aws.hooks.ssm import SsmHook
-from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.utils.state import DagRunState, State
 from airflow.utils.trigger_rule import TriggerRule
 
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
+
 if TYPE_CHECKING:
     from botocore.client import BaseClient
 
@@ -21,7 +21,7 @@
 
 import pytest
 
-from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 
 if not AIRFLOW_V_3_0_PLUS:
     pytest.skip("AWS auth manager is only compatible with Airflow >= 3.0.0", allow_module_level=True)
@@ -18,7 +18,7 @@
 
 import pytest
 
-from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 
 if not AIRFLOW_V_3_0_PLUS:
     pytest.skip("AWS auth manager is only compatible with Airflow >= 3.0.0", allow_module_level=True)
@@ -20,7 +20,7 @@
 
 import pytest
 
-from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 
 if not AIRFLOW_V_3_0_PLUS:
     pytest.skip("AWS auth manager is only compatible with Airflow >= 3.0.0", allow_module_level=True)
@@ -21,7 +21,7 @@
 
 import pytest
 
-from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 
 if not AIRFLOW_V_3_0_PLUS:
     pytest.skip("AWS auth manager is only compatible with Airflow >= 3.0.0", allow_module_level=True)
@@ -19,8 +19,8 @@
 import pytest
 
 from airflow.providers.amazon.aws.links.athena import AthenaQueryResultsLink
-from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
 
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 from unit.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase
 
 if AIRFLOW_V_3_0_PLUS:
@@ -23,8 +23,8 @@
     BatchJobDetailsLink,
     BatchJobQueueLink,
 )
-from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
 
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 from unit.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase
 
 if AIRFLOW_V_3_0_PLUS:
@@ -20,8 +20,8 @@
     ComprehendDocumentClassifierLink,
     ComprehendPiiEntitiesDetectionLink,
 )
-from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
 
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 from unit.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase
 
 if AIRFLOW_V_3_0_PLUS:
@@ -17,8 +17,8 @@
 from __future__ import annotations
 
 from airflow.providers.amazon.aws.links.datasync import DataSyncTaskExecutionLink, DataSyncTaskLink
-from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
 
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 from unit.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase
 
 if AIRFLOW_V_3_0_PLUS:
2 changes: 1 addition & 1 deletion providers/amazon/tests/unit/amazon/aws/links/test_ec2.py
@@ -17,8 +17,8 @@
 from __future__ import annotations
 
 from airflow.providers.amazon.aws.links.ec2 import EC2InstanceDashboardLink, EC2InstanceLink
-from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
 
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 from unit.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase
 
 if AIRFLOW_V_3_0_PLUS:
2 changes: 1 addition & 1 deletion providers/amazon/tests/unit/amazon/aws/links/test_emr.py
@@ -32,8 +32,8 @@
     get_log_uri,
     get_serverless_dashboard_url,
 )
-from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
 
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 from unit.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase
 
 pytestmark = pytest.mark.db_test
@@ -17,8 +17,8 @@
 from __future__ import annotations
 
 from airflow.providers.amazon.aws.links.glue import GlueJobRunDetailsLink
-from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
 
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 from unit.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase
 
 if AIRFLOW_V_3_0_PLUS:
@@ -17,8 +17,8 @@
 from __future__ import annotations
 
 from airflow.providers.amazon.aws.links.logs import CloudWatchEventsLink
-from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
 
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 from unit.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase
 
 if AIRFLOW_V_3_0_PLUS:
@@ -18,8 +18,8 @@
 
 from airflow.providers.amazon.aws.links.base_aws import BaseAwsLink
 from airflow.providers.amazon.aws.links.sagemaker import SageMakerTransformJobLink
-from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
 
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 from unit.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase
 
 if AIRFLOW_V_3_0_PLUS:
@@ -17,8 +17,8 @@
 from __future__ import annotations
 
 from airflow.providers.amazon.aws.links.sagemaker_unified_studio import SageMakerUnifiedStudioLink
-from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
 
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 from unit.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase
 
 if AIRFLOW_V_3_0_PLUS:
@@ -22,8 +22,8 @@
     StateMachineDetailsLink,
     StateMachineExecutionsDetailsLink,
 )
-from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
 
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 from unit.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase
 
 if AIRFLOW_V_3_0_PLUS:
@@ -32,9 +32,10 @@
 )
 from airflow.providers.apache.beam.triggers.beam import BeamJavaPipelineTrigger, BeamPythonPipelineTrigger
 from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration
-from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.version import version
 
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
+
 TASK_ID = "test-beam-operator"
 DEFAULT_RUNNER = "DirectRunner"
 JOB_ID = "test-dataflow-pipeline-id"
@@ -28,9 +28,9 @@
 from airflow.cli import cli_parser
 from airflow.executors import executor_loader
 from airflow.providers.cncf.kubernetes.cli import kubernetes_command
-from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_0_PLUS
 
 from tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 
 pytestmark = pytest.mark.db_test
 
@@ -21,7 +21,7 @@
 
 import pytest
 
-from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_0_PLUS
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk import task
@@ -21,7 +21,8 @@
 import pytest
 
 from airflow.exceptions import AirflowSkipException
-from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_0_PLUS
 
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
+
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk import task
@@ -22,7 +22,7 @@
 
 import pytest
 
-from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_0_PLUS
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk import setup, task, teardown
@@ -26,12 +26,12 @@
 
 from airflow.models.renderedtifields import RenderedTaskInstanceFields, RenderedTaskInstanceFields as RTIF
 from airflow.providers.cncf.kubernetes.template_rendering import get_rendered_k8s_spec, render_k8s_pod_yaml
-from airflow.providers.cncf.kubernetes.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.utils import timezone
 from airflow.utils.session import create_session
 from airflow.version import version
 
 from tests_common.test_utils.compat import BashOperator
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 
 pytestmark = pytest.mark.db_test
 
@@ -24,9 +24,10 @@
 from airflow import DAG
 from airflow.decorators import task
 from airflow.providers.common.io.operators.file_transfer import FileTransferOperator
-from airflow.providers.common.io.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.utils.trigger_rule import TriggerRule
 
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
+
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk import ObjectStoragePath
 else:
@@ -22,14 +22,14 @@
 import pytest
 
 import airflow.models.xcom
-from airflow.providers.common.io.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.providers.common.io.xcom.backend import XComObjectStorageBackend
 from airflow.providers.standard.operators.empty import EmptyOperator
 from airflow.utils import timezone
 from airflow.utils.xcom import XCOM_RETURN_KEY
 
 from tests_common.test_utils import db
 from tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 
 pytestmark = [pytest.mark.db_test]
 
@@ -37,9 +37,9 @@
     get_launch_task_id,
     store_databricks_job_run_link,
 )
-from airflow.providers.databricks.version_compat import AIRFLOW_V_3_0_PLUS
 
 from tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 
 DAG_ID = "test_dag"
 TASK_ID = "test_task"
@@ -31,9 +31,10 @@
     DbtCloudRunJobOperator,
 )
 from airflow.providers.dbt.cloud.triggers.dbt import DbtCloudRunJobTrigger
-from airflow.providers.dbt.cloud.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.utils import timezone
 
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
+
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk.execution_time.comms import XComResult
 
@@ -22,7 +22,7 @@
 
 import pytest
 
-from airflow.providers.docker.version_compat import AIRFLOW_V_3_0_PLUS
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk import setup, task, teardown
@@ -25,7 +25,6 @@
 from datetime import datetime
 
 from airflow.models.dag import DAG
-from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.providers.google.cloud.operators.kubernetes_engine import (
     GKECreateClusterOperator,
     GKEDeleteClusterOperator,
@@ -35,6 +34,7 @@
 from airflow.utils.trigger_rule import TriggerRule
 
 from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "kubernetes_engine"
@@ -25,7 +25,6 @@
 from datetime import datetime
 
 from airflow.models.dag import DAG
-from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.providers.google.cloud.operators.kubernetes_engine import (
     GKECreateClusterOperator,
     GKEDeleteClusterOperator,
@@ -35,6 +34,7 @@
 from airflow.utils.trigger_rule import TriggerRule
 
 from system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "kubernetes_engine_async"
@@ -25,10 +25,11 @@
 from datetime import datetime
 
 from airflow.models.dag import DAG
-from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.providers.google.cloud.operators.translate import CloudTranslateTextOperator
 from airflow.providers.standard.operators.bash import BashOperator
 
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
+
 DAG_ID = "gcp_translate"
 
 with DAG(
@@ -24,7 +24,8 @@
 
 from airflow.providers.google.cloud.links.base import BaseGoogleLink
 from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
-from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
 
+from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
+
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk.execution_time.comms import XComResult