diff --git a/providers/amazon/pyproject.toml b/providers/amazon/pyproject.toml
index a1d8cc66327ef..d33930e9e4431 100644
--- a/providers/amazon/pyproject.toml
+++ b/providers/amazon/pyproject.toml
@@ -58,7 +58,7 @@ requires-python = ">=3.10"
 # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
 dependencies = [
     "apache-airflow>=2.10.0",
-    "apache-airflow-providers-common-compat>=1.6.1",
+    "apache-airflow-providers-common-compat>=1.7.4",  # TODO: bump to next version
     "apache-airflow-providers-common-sql>=1.27.0",
     "apache-airflow-providers-http",
     # We should update minimum version of boto3 and here regularly to avoid `pip` backtracking with the number
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/hooks/base_aws.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/base_aws.py
index 979cb4bc345b9..ff71ad365a5d2 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/hooks/base_aws.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/hooks/base_aws.py
@@ -60,7 +60,7 @@
 from airflow.providers.amazon.aws.utils.connection_wrapper import AwsConnectionWrapper
 from airflow.providers.amazon.aws.utils.identifiers import generate_uuid
 from airflow.providers.amazon.aws.utils.suppress import return_on_error
-from airflow.providers.amazon.version_compat import BaseHook
+from airflow.providers.common.compat.sdk import BaseHook
 from airflow.providers_manager import ProvidersManager
 from airflow.utils.helpers import exactly_one
 from airflow.utils.log.logging_mixin import LoggingMixin
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py
index ebf3c22d8d691..1484c286a1bfb 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py
@@ -26,7 +26,7 @@
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.utils.sagemaker_unified_studio import is_local_runner
-from airflow.providers.amazon.version_compat import BaseHook
+from airflow.providers.common.compat.sdk import BaseHook
 
 
 class SageMakerNotebookHook(BaseHook):
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/links/base_aws.py b/providers/amazon/src/airflow/providers/amazon/aws/links/base_aws.py
index 083fffdcd6345..5be80e58d9f59 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/links/base_aws.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/links/base_aws.py
@@ -20,7 +20,7 @@
 from typing import TYPE_CHECKING, ClassVar
 
 from airflow.providers.amazon.aws.utils.suppress import return_on_error
-from airflow.providers.amazon.version_compat import BaseOperatorLink, XCom
+from airflow.providers.common.compat.sdk import BaseOperatorLink, XCom
 
 if TYPE_CHECKING:
     from airflow.models import BaseOperator
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/operators/base_aws.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/base_aws.py
index c998ebc046630..29fb40d3e67ed 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/operators/base_aws.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/base_aws.py
@@ -25,7 +25,7 @@
     AwsHookType,
     aws_template_fields,
 )
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.utils.types import NOTSET, ArgNotSet
 
 
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/operators/sagemaker_unified_studio.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/sagemaker_unified_studio.py
index cf204b84c6e0d..7b6f6dbec8a38 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/operators/sagemaker_unified_studio.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/sagemaker_unified_studio.py
@@ -33,7 +33,7 @@
 from airflow.providers.amazon.aws.triggers.sagemaker_unified_studio import (
     SageMakerNotebookJobTrigger,
 )
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/sensors/base_aws.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/base_aws.py
index 58db967e292c5..b13634bc2bdd3 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/sensors/base_aws.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/sensors/base_aws.py
@@ -25,7 +25,7 @@
     AwsHookType,
     aws_template_fields,
 )
-from airflow.providers.amazon.version_compat import BaseSensorOperator
+from airflow.providers.common.compat.sdk import BaseSensorOperator
 from airflow.utils.types import NOTSET, ArgNotSet
 
 
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/sensors/s3.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/s3.py
index 42cdbd402ee19..878e31b924594 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/sensors/s3.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/sensors/s3.py
@@ -36,7 +36,7 @@
 from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
 from airflow.providers.amazon.aws.triggers.s3 import S3KeysUnchangedTrigger, S3KeyTrigger
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
-from airflow.providers.amazon.version_compat import poke_mode_only
+from airflow.providers.common.compat.sdk import poke_mode_only
 
 
 class S3KeySensor(AwsBaseSensor[S3Hook]):
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/sensors/sagemaker_unified_studio.py b/providers/amazon/src/airflow/providers/amazon/aws/sensors/sagemaker_unified_studio.py
index 072e7f1e48feb..62ea2ea2b19a5 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/sensors/sagemaker_unified_studio.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/sensors/sagemaker_unified_studio.py
@@ -25,7 +25,7 @@
 from airflow.providers.amazon.aws.hooks.sagemaker_unified_studio import (
     SageMakerNotebookHook,
 )
-from airflow.providers.amazon.version_compat import BaseSensorOperator
+from airflow.providers.common.compat.sdk import BaseSensorOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py
index e58d43393c85a..f10ae9dbcc68a 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py
@@ -23,7 +23,7 @@
 from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 
 try:
     from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/base.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/base.py
index d678a43cdf846..612e57701cb2f 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/base.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/base.py
@@ -22,7 +22,7 @@
 from collections.abc import Sequence
 
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.utils.types import NOTSET, ArgNotSet
 
 
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/exasol_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/exasol_to_s3.py
index 3b231b9621cfb..2c75ef7ad35a4 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/exasol_to_s3.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/exasol_to_s3.py
@@ -24,7 +24,7 @@
 from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.exasol.hooks.exasol import ExasolHook
 
 if TYPE_CHECKING:
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/ftp_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/ftp_to_s3.py
index 92cfcd870806c..a0460a2f8ff25 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/ftp_to_s3.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/ftp_to_s3.py
@@ -22,7 +22,7 @@
 from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.ftp.hooks.ftp import FTPHook
 
 if TYPE_CHECKING:
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/gcs_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/gcs_to_s3.py
index 8f6644e09f693..92289a89638b4 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/gcs_to_s3.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/gcs_to_s3.py
@@ -27,7 +27,7 @@
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
 
 if TYPE_CHECKING:
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py
index 1e0e51841514b..2574f83624872 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py
@@ -22,7 +22,7 @@
 from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.hooks.glacier import GlacierHook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
 
 if TYPE_CHECKING:
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/google_api_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/google_api_to_s3.py
index b6f7b8d149908..e73b4c2bc90ce 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/google_api_to_s3.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/google_api_to_s3.py
@@ -26,14 +26,11 @@
 
 from airflow.models.xcom import XCOM_RETURN_KEY
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.google.common.hooks.discovery_api import GoogleDiscoveryApiHook
 
 if TYPE_CHECKING:
-    try:
-        from airflow.sdk.types import RuntimeTaskInstanceProtocol
-    except ImportError:
-        from airflow.models import TaskInstance as RuntimeTaskInstanceProtocol  # type: ignore[assignment]
+    from airflow.providers.common.compat.sdk import RuntimeTaskInstanceProtocol
     from airflow.utils.context import Context
 
 # MAX XCOM Size is 48KB
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py
index b8ab98ba21da1..7d1d1d1348462 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py
@@ -24,8 +24,8 @@
 from typing import TYPE_CHECKING, Literal
 
 from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
-from airflow.providers.amazon.version_compat import BaseOperator
 from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
+from airflow.providers.common.compat.sdk import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/http_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/http_to_s3.py
index 3a91312cc07b7..ca7582bbc8106 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/http_to_s3.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/http_to_s3.py
@@ -23,7 +23,7 @@
 from typing import TYPE_CHECKING, Any
 
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.http.hooks.http import HttpHook
 
 if TYPE_CHECKING:
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py
index 5b9979b2ac6a2..6aa046ea5888d 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py
@@ -23,7 +23,7 @@
 from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.imap.hooks.imap import ImapHook
 
 if TYPE_CHECKING:
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/local_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/local_to_s3.py
index 367aedc34add0..a47acd7721467 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/local_to_s3.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/local_to_s3.py
@@ -21,7 +21,7 @@
 from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/mongo_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/mongo_to_s3.py
index 1515b5a951bea..5547e955bbc8e 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/mongo_to_s3.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/mongo_to_s3.py
@@ -24,7 +24,7 @@
 from bson import json_util
 
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.mongo.hooks.mongo import MongoHook
 
 if TYPE_CHECKING:
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/redshift_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/redshift_to_s3.py
index 4485eb44779cc..a285af2f65b5b 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/redshift_to_s3.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/redshift_to_s3.py
@@ -28,7 +28,7 @@
 from airflow.providers.amazon.aws.hooks.redshift_sql import RedshiftSQLHook
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.aws.utils.redshift import build_credentials_block
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.utils.types import NOTSET, ArgNotSet
 
 if TYPE_CHECKING:
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py
index e82c3dc5209a1..495d97811fb49 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py
@@ -24,7 +24,7 @@
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_ftp.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_ftp.py
index 8ce41b7816d6c..c259e8c9f9800 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_ftp.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_ftp.py
@@ -22,7 +22,7 @@
 from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.ftp.hooks.ftp import FTPHook
 
 if TYPE_CHECKING:
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_redshift.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_redshift.py
index 906710e622bca..ae36822976ba8 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_redshift.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_redshift.py
@@ -24,7 +24,7 @@
 from airflow.providers.amazon.aws.hooks.redshift_sql import RedshiftSQLHook
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.aws.utils.redshift import build_credentials_block
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.utils.types import NOTSET, ArgNotSet
 
 if TYPE_CHECKING:
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_sftp.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_sftp.py
index 849c12bdc2af5..eee92bf23ebfe 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_sftp.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_sftp.py
@@ -23,7 +23,7 @@
 from urllib.parse import urlsplit
 
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.ssh.hooks.ssh import SSHHook
 
 if TYPE_CHECKING:
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_sql.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_sql.py
index a398538e24407..5ecf8cb755d25 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_sql.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/s3_to_sql.py
@@ -23,7 +23,7 @@
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseHook, BaseOperator
+from airflow.providers.common.compat.sdk import BaseHook, BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/salesforce_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/salesforce_to_s3.py
index ac50323ccb7d5..e873e9490dbc9 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/salesforce_to_s3.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/salesforce_to_s3.py
@@ -22,7 +22,7 @@
 from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.salesforce.hooks.salesforce import SalesforceHook
 
 if TYPE_CHECKING:
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/sftp_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/sftp_to_s3.py
index 8ba11155f2f8c..0399b2168f9b8 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/sftp_to_s3.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/sftp_to_s3.py
@@ -23,7 +23,7 @@
 from urllib.parse import urlsplit
 
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.ssh.hooks.ssh import SSHHook
 
 if TYPE_CHECKING:
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/transfers/sql_to_s3.py b/providers/amazon/src/airflow/providers/amazon/aws/transfers/sql_to_s3.py
index 45224059e994f..31a53b0fd25cf 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/transfers/sql_to_s3.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/transfers/sql_to_s3.py
@@ -27,7 +27,7 @@
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseHook, BaseOperator
+from airflow.providers.common.compat.sdk import BaseHook, BaseOperator
 
 if TYPE_CHECKING:
     import pandas as pd
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/utils/connection_wrapper.py b/providers/amazon/src/airflow/providers/amazon/aws/utils/connection_wrapper.py
index bcbf3d3857c8a..3ed84db484351 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/utils/connection_wrapper.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/utils/connection_wrapper.py
@@ -32,10 +32,7 @@
 from airflow.utils.types import NOTSET, ArgNotSet
 
 if TYPE_CHECKING:
-    try:
-        from airflow.sdk import Connection
-    except ImportError:
-        from airflow.models.connection import Connection  # type: ignore[assignment]
+    from airflow.providers.common.compat.sdk import Connection
 
 
 @dataclass
diff --git a/providers/amazon/src/airflow/providers/amazon/version_compat.py b/providers/amazon/src/airflow/providers/amazon/version_compat.py
index d72fd2992675b..a7d116ec0433a 100644
--- a/providers/amazon/src/airflow/providers/amazon/version_compat.py
+++ b/providers/amazon/src/airflow/providers/amazon/version_compat.py
@@ -36,28 +36,8 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
 
 AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
 AIRFLOW_V_3_1_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 1)
-if AIRFLOW_V_3_1_PLUS:
-    from airflow.sdk import BaseHook
-    from airflow.sdk.bases.sensor import poke_mode_only
-else:
-    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
-    from airflow.sensors.base import poke_mode_only  # type: ignore[no-redef]
-
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import BaseOperator, BaseOperatorLink, BaseSensorOperator
-    from airflow.sdk.execution_time.xcom import XCom
-else:
-    from airflow.models import BaseOperator, XCom
-    from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]
-    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
-
 __all__ = [
     "AIRFLOW_V_3_0_PLUS",
     "AIRFLOW_V_3_1_PLUS",
-    "BaseHook",
-    "BaseOperator",
-    "BaseOperatorLink",
-    "BaseSensorOperator",
-    "poke_mode_only",
-    "XCom",
+    "AIRFLOW_V_3_1_1_PLUS",
 ]
diff --git a/providers/amazon/tests/system/amazon/aws/example_appflow.py b/providers/amazon/tests/system/amazon/aws/example_appflow.py
index 6ecb2da164a08..f55e77156a8fd 100644
--- a/providers/amazon/tests/system/amazon/aws/example_appflow.py
+++ b/providers/amazon/tests/system/amazon/aws/example_appflow.py
@@ -17,7 +17,6 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.operators.appflow import (
     AppflowRecordsShortCircuitOperator,
@@ -26,21 +25,9 @@
     AppflowRunDailyOperator,
     AppflowRunFullOperator,
 )
+from airflow.providers.common.compat.sdk import DAG, chain
 from airflow.providers.standard.operators.bash import BashOperator
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
-
 from system.amazon.aws.utils import SystemTestContextBuilder
 
 sys_test_context_task = SystemTestContextBuilder().build()
diff --git a/providers/amazon/tests/system/amazon/aws/example_azure_blob_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_azure_blob_to_s3.py
index d71348b7ba991..b79e76c165d94 100644
--- a/providers/amazon/tests/system/amazon/aws/example_azure_blob_to_s3.py
+++ b/providers/amazon/tests/system/amazon/aws/example_azure_blob_to_s3.py
@@ -17,23 +17,11 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.transfers.azure_blob_to_s3 import AzureBlobStorageToS3Operator
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError:
diff --git a/providers/amazon/tests/system/amazon/aws/example_cloudformation.py b/providers/amazon/tests/system/amazon/aws/example_cloudformation.py
index 57091567263b4..c4d3f3cc0b7e0 100644
--- a/providers/amazon/tests/system/amazon/aws/example_cloudformation.py
+++ b/providers/amazon/tests/system/amazon/aws/example_cloudformation.py
@@ -18,7 +18,6 @@
 
 import json
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.operators.cloud_formation import (
     CloudFormationCreateStackOperator,
@@ -28,19 +27,8 @@
     CloudFormationCreateStackSensor,
     CloudFormationDeleteStackSensor,
 )
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError:
diff --git a/providers/amazon/tests/system/amazon/aws/example_eks_templated.py b/providers/amazon/tests/system/amazon/aws/example_eks_templated.py
index 6068bd6858561..b328e89f3c65c 100644
--- a/providers/amazon/tests/system/amazon/aws/example_eks_templated.py
+++ b/providers/amazon/tests/system/amazon/aws/example_eks_templated.py
@@ -17,7 +17,6 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.hooks.eks import ClusterStates, NodegroupStates
 from airflow.providers.amazon.aws.operators.eks import (
@@ -28,19 +27,8 @@
     EksPodOperator,
 )
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksNodegroupStateSensor
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 from system.amazon.aws.utils import SystemTestContextBuilder
 
 sys_test_context_task = SystemTestContextBuilder().build()
diff --git a/providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py b/providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py
index ec3e4b9a27e03..cd241027b5cdd 100644
--- a/providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py
+++ b/providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py
@@ -17,7 +17,6 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from pendulum import duration
 
@@ -28,19 +27,8 @@
     EksPodOperator,
 )
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksFargateProfileStateSensor
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError:
diff --git a/providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_profile.py b/providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_profile.py
index 24ec7c4273147..47f9a865d680f 100644
--- a/providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_profile.py
+++ b/providers/amazon/tests/system/amazon/aws/example_eks_with_fargate_profile.py
@@ -17,7 +17,6 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from pendulum import duration
 
@@ -30,19 +29,8 @@
     EksPodOperator,
 )
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksFargateProfileStateSensor
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError:
diff --git a/providers/amazon/tests/system/amazon/aws/example_emr.py b/providers/amazon/tests/system/amazon/aws/example_emr.py
index d6c54ac79cd58..67bdb46525a6e 100644
--- a/providers/amazon/tests/system/amazon/aws/example_emr.py
+++ b/providers/amazon/tests/system/amazon/aws/example_emr.py
@@ -20,7 +20,7 @@
 
 import json
 from datetime import datetime
-from typing import TYPE_CHECKING, Any
+from typing import Any
 
 import boto3
 
@@ -32,21 +32,8 @@
 )
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.sensors.emr import EmrJobFlowSensor, EmrStepSensor
+from airflow.providers.common.compat.sdk import DAG, chain, task
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.decorators import task  # type: ignore[attr-defined]
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain, task
-    else:
-        # Airflow 2.10 compat
-        from airflow.decorators import task  # type: ignore[attr-defined,no-redef]
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError:
diff --git a/providers/amazon/tests/system/amazon/aws/example_emr_notebook_execution.py b/providers/amazon/tests/system/amazon/aws/example_emr_notebook_execution.py
index d87514f0a2523..de53778659e0a 100644
--- a/providers/amazon/tests/system/amazon/aws/example_emr_notebook_execution.py
+++ b/providers/amazon/tests/system/amazon/aws/example_emr_notebook_execution.py
@@ -19,26 +19,14 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.operators.emr import (
     EmrStartNotebookExecutionOperator,
     EmrStopNotebookExecutionOperator,
 )
 from airflow.providers.amazon.aws.sensors.emr import EmrNotebookExecutionSensor
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
 
 DAG_ID = "example_emr_notebook"
diff --git a/providers/amazon/tests/system/amazon/aws/example_emr_serverless.py b/providers/amazon/tests/system/amazon/aws/example_emr_serverless.py
index 3231e22bd5b0d..3b4246dd02af9 100644
--- a/providers/amazon/tests/system/amazon/aws/example_emr_serverless.py
+++ b/providers/amazon/tests/system/amazon/aws/example_emr_serverless.py
@@ -17,7 +17,6 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 import boto3
 
@@ -30,19 +29,8 @@
 )
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.sensors.emr import EmrServerlessApplicationSensor, EmrServerlessJobSensor
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError:
diff --git a/providers/amazon/tests/system/amazon/aws/example_eventbridge.py b/providers/amazon/tests/system/amazon/aws/example_eventbridge.py
index 501e5875dc425..762a583c78672 100644
--- a/providers/amazon/tests/system/amazon/aws/example_eventbridge.py
+++ b/providers/amazon/tests/system/amazon/aws/example_eventbridge.py
@@ -17,7 +17,6 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.operators.eventbridge import (
     EventBridgeDisableRuleOperator,
@@ -25,19 +24,8 @@
     EventBridgePutEventsOperator,
     EventBridgePutRuleOperator,
 )
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
 
 DAG_ID = "example_eventbridge"
diff --git a/providers/amazon/tests/system/amazon/aws/example_ftp_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_ftp_to_s3.py
index 79f564c2dcfe7..4e34cdedf4600 100644
--- a/providers/amazon/tests/system/amazon/aws/example_ftp_to_s3.py
+++ b/providers/amazon/tests/system/amazon/aws/example_ftp_to_s3.py
@@ -17,23 +17,11 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.transfers.ftp_to_s3 import FTPToS3Operator
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError:
diff --git a/providers/amazon/tests/system/amazon/aws/example_glue.py b/providers/amazon/tests/system/amazon/aws/example_glue.py
index 8d87f2c704f90..f2d476d5ab6b6 100644
--- a/providers/amazon/tests/system/amazon/aws/example_glue.py
+++ b/providers/amazon/tests/system/amazon/aws/example_glue.py
@@ -34,16 +34,7 @@
 from airflow.providers.amazon.aws.sensors.glue import GlueJobSensor
 from airflow.providers.amazon.aws.sensors.glue_catalog_partition import GlueCatalogPartitionSensor
 from airflow.providers.amazon.aws.sensors.glue_crawler import GlueCrawlerSensor
-
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import DAG, chain, task
-else:
-    # Airflow 2 path
-    from airflow.decorators import task  # type: ignore[attr-defined,no-redef]
-    from airflow.models.baseoperator import chain  # type: ignore[attr-defined,no-redef]
-    from airflow.models.dag import DAG  # type: ignore[attr-defined,no-redef,assignment]
+from airflow.providers.common.compat.sdk import DAG, chain, task
 
 try:
     from airflow.sdk import TriggerRule
diff --git a/providers/amazon/tests/system/amazon/aws/example_google_api_sheets_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_google_api_sheets_to_s3.py
index 156cc4fea80d7..910286e1dddfa 100644
--- a/providers/amazon/tests/system/amazon/aws/example_google_api_sheets_to_s3.py
+++ b/providers/amazon/tests/system/amazon/aws/example_google_api_sheets_to_s3.py
@@ -23,23 +23,11 @@
 
 import os
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError:
diff --git a/providers/amazon/tests/system/amazon/aws/example_imap_attachment_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_imap_attachment_to_s3.py
index 35b12da9bcb5c..f6b7d255bc2ea 100644
--- a/providers/amazon/tests/system/amazon/aws/example_imap_attachment_to_s3.py
+++ b/providers/amazon/tests/system/amazon/aws/example_imap_attachment_to_s3.py
@@ -22,23 +22,11 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.transfers.imap_attachment_to_s3 import ImapAttachmentToS3Operator
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError:
diff --git a/providers/amazon/tests/system/amazon/aws/example_local_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_local_to_s3.py
index 4d43a10c77aa5..82566a34f4a3d 100644
--- a/providers/amazon/tests/system/amazon/aws/example_local_to_s3.py
+++ b/providers/amazon/tests/system/amazon/aws/example_local_to_s3.py
@@ -18,25 +18,11 @@
 
 import os
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.transfers.local_to_s3 import LocalFilesystemToS3Operator
+from airflow.providers.common.compat.sdk import DAG, chain, task
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.decorators import task  # type: ignore[attr-defined]
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain, task
-    else:
-        # Airflow 2.10 compat
-        from airflow.decorators import task  # type: ignore[attr-defined,no-redef]
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError:
diff --git a/providers/amazon/tests/system/amazon/aws/example_mongo_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_mongo_to_s3.py
index a9bd9d22d2695..45ca5b5bd2cfd 100644
--- a/providers/amazon/tests/system/amazon/aws/example_mongo_to_s3.py
+++ b/providers/amazon/tests/system/amazon/aws/example_mongo_to_s3.py
@@ -16,23 +16,10 @@
 # under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING
-
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.transfers.mongo_to_s3 import MongoToS3Operator
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError:
diff --git a/providers/amazon/tests/system/amazon/aws/example_rds_instance.py b/providers/amazon/tests/system/amazon/aws/example_rds_instance.py
index 50111f0cfe64f..442fdf453a942 100644
--- a/providers/amazon/tests/system/amazon/aws/example_rds_instance.py
+++ b/providers/amazon/tests/system/amazon/aws/example_rds_instance.py
@@ -17,7 +17,6 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.operators.rds import (
     RdsCreateDbInstanceOperator,
@@ -26,19 +25,8 @@
     RdsStopDbOperator,
 )
 from airflow.providers.amazon.aws.sensors.rds import RdsDbSensor
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError:
diff --git a/providers/amazon/tests/system/amazon/aws/example_rds_snapshot.py b/providers/amazon/tests/system/amazon/aws/example_rds_snapshot.py
index 671c99dbe6f71..ca7a93e5d8e9c 100644
--- a/providers/amazon/tests/system/amazon/aws/example_rds_snapshot.py
+++ b/providers/amazon/tests/system/amazon/aws/example_rds_snapshot.py
@@ -17,7 +17,6 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.operators.rds import (
     RdsCopyDbSnapshotOperator,
@@ -27,19 +26,8 @@
     RdsDeleteDbSnapshotOperator,
 )
 from airflow.providers.amazon.aws.sensors.rds import RdsSnapshotExistenceSensor
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError:
diff --git a/providers/amazon/tests/system/amazon/aws/example_redshift.py b/providers/amazon/tests/system/amazon/aws/example_redshift.py
index 149c0d0d72ffd..28cbdf6ebdea0 100644
--- a/providers/amazon/tests/system/amazon/aws/example_redshift.py
+++ b/providers/amazon/tests/system/amazon/aws/example_redshift.py
@@ -19,7 +19,6 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.operators.redshift_cluster import (
     RedshiftCreateClusterOperator,
@@ -31,19 +30,8 @@
 )
 from airflow.providers.amazon.aws.operators.redshift_data import RedshiftDataOperator
 from airflow.providers.amazon.aws.sensors.redshift_cluster import RedshiftClusterSensor
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError:
diff --git a/providers/amazon/tests/system/amazon/aws/example_redshift_s3_transfers.py b/providers/amazon/tests/system/amazon/aws/example_redshift_s3_transfers.py
index f9a8676064e0c..62ab62e7cad33 100644
--- a/providers/amazon/tests/system/amazon/aws/example_redshift_s3_transfers.py
+++ b/providers/amazon/tests/system/amazon/aws/example_redshift_s3_transfers.py
@@ -17,7 +17,6 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.operators.redshift_cluster import (
     RedshiftCreateClusterOperator,
@@ -33,19 +32,8 @@
 from airflow.providers.amazon.aws.sensors.s3 import S3KeySensor
 from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator
 from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError:
diff --git a/providers/amazon/tests/system/amazon/aws/example_s3.py b/providers/amazon/tests/system/amazon/aws/example_s3.py
index db1c6f1ab57c9..7affb1ee76e72 100644
--- a/providers/amazon/tests/system/amazon/aws/example_s3.py
+++ b/providers/amazon/tests/system/amazon/aws/example_s3.py
@@ -17,7 +17,6 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.operators.s3 import (
     S3CopyObjectOperator,
@@ -33,19 +32,8 @@
     S3PutBucketTaggingOperator,
 )
 from airflow.providers.amazon.aws.sensors.s3 import S3KeySensor, S3KeysUnchangedSensor
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError:
diff --git a/providers/amazon/tests/system/amazon/aws/example_s3_to_ftp.py b/providers/amazon/tests/system/amazon/aws/example_s3_to_ftp.py
index d5cb265b0edfb..288331d620931 100644
--- a/providers/amazon/tests/system/amazon/aws/example_s3_to_ftp.py
+++ b/providers/amazon/tests/system/amazon/aws/example_s3_to_ftp.py
@@ -17,23 +17,11 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.transfers.s3_to_ftp import S3ToFTPOperator
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError:
diff --git a/providers/amazon/tests/system/amazon/aws/example_s3_to_sftp.py b/providers/amazon/tests/system/amazon/aws/example_s3_to_sftp.py
index 284f398904f51..18416ad55cba2 100644
--- a/providers/amazon/tests/system/amazon/aws/example_s3_to_sftp.py
+++ b/providers/amazon/tests/system/amazon/aws/example_s3_to_sftp.py
@@ -17,23 +17,11 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.transfers.s3_to_sftp import S3ToSFTPOperator
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError:
diff --git a/providers/amazon/tests/system/amazon/aws/example_sagemaker_notebook.py b/providers/amazon/tests/system/amazon/aws/example_sagemaker_notebook.py
index 766b01c5d89f5..74ac3d36d8e23 100644
--- a/providers/amazon/tests/system/amazon/aws/example_sagemaker_notebook.py
+++ b/providers/amazon/tests/system/amazon/aws/example_sagemaker_notebook.py
@@ -17,7 +17,6 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.operators.sagemaker import (
     SageMakerCreateNotebookOperator,
@@ -25,19 +24,8 @@
     SageMakerStartNoteBookOperator,
     SageMakerStopNotebookOperator,
 )
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 from system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
 
 DAG_ID = "example_sagemaker_notebook"
diff --git a/providers/amazon/tests/system/amazon/aws/example_salesforce_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_salesforce_to_s3.py
index 80bb09024caa1..7f6aa3e2930a4 100644
--- a/providers/amazon/tests/system/amazon/aws/example_salesforce_to_s3.py
+++ b/providers/amazon/tests/system/amazon/aws/example_salesforce_to_s3.py
@@ -22,23 +22,11 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.transfers.salesforce_to_s3 import SalesforceToS3Operator
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError:
diff --git a/providers/amazon/tests/system/amazon/aws/example_sftp_to_s3.py b/providers/amazon/tests/system/amazon/aws/example_sftp_to_s3.py
index e3d221b87be32..ec11567865622 100644
--- a/providers/amazon/tests/system/amazon/aws/example_sftp_to_s3.py
+++ b/providers/amazon/tests/system/amazon/aws/example_sftp_to_s3.py
@@ -17,23 +17,11 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.transfers.sftp_to_s3 import SFTPToS3Operator
+from airflow.providers.common.compat.sdk import DAG, chain
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError:
diff --git a/providers/amazon/tests/unit/amazon/aws/links/test_base_aws.py b/providers/amazon/tests/unit/amazon/aws/links/test_base_aws.py
index 383f154f3db8a..1fc8ce25a6188 100644
--- a/providers/amazon/tests/unit/amazon/aws/links/test_base_aws.py
+++ b/providers/amazon/tests/unit/amazon/aws/links/test_base_aws.py
@@ -23,7 +23,7 @@
 import pytest
 
 from airflow.providers.amazon.aws.links.base_aws import BaseAwsLink
-from airflow.providers.amazon.version_compat import XCom
+from airflow.providers.common.compat.sdk import XCom
 from airflow.serialization.serialized_objects import SerializedDAG
 
 from tests_common.test_utils.mock_operators import MockOperator
diff --git a/providers/amazon/tests/unit/amazon/aws/operators/test_base_aws.py b/providers/amazon/tests/unit/amazon/aws/operators/test_base_aws.py
index 3cddb55eaa0c3..7bfc69d013292 100644
--- a/providers/amazon/tests/unit/amazon/aws/operators/test_base_aws.py
+++ b/providers/amazon/tests/unit/amazon/aws/operators/test_base_aws.py
@@ -22,7 +22,7 @@
 
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
-from airflow.providers.amazon.version_compat import BaseHook
+from airflow.providers.common.compat.sdk import BaseHook
 
 try:
     from airflow.sdk import timezone
diff --git a/providers/amazon/tests/unit/amazon/aws/sensors/test_base_aws.py b/providers/amazon/tests/unit/amazon/aws/sensors/test_base_aws.py
index 128e5981e9448..73acd60804cef 100644
--- a/providers/amazon/tests/unit/amazon/aws/sensors/test_base_aws.py
+++ b/providers/amazon/tests/unit/amazon/aws/sensors/test_base_aws.py
@@ -22,7 +22,7 @@
 
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
-from airflow.providers.amazon.version_compat import BaseHook
+from airflow.providers.common.compat.sdk import BaseHook
 
 try:
     from airflow.sdk import timezone