2 changes: 1 addition & 1 deletion providers/amazon/pyproject.toml
@@ -58,7 +58,7 @@ requires-python = ">=3.10"
 # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
 dependencies = [
     "apache-airflow>=2.10.0",
-    "apache-airflow-providers-common-compat>=1.6.1",
+    "apache-airflow-providers-common-compat>=1.7.4", # TODO: bump to next version
     "apache-airflow-providers-common-sql>=1.27.0",
     "apache-airflow-providers-http",
     # We should update minimum version of boto3 and here regularly to avoid `pip` backtracking with the number
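Note: the floor bump above accompanies the hunks that follow, which swap the provider-local airflow.providers.amazon.version_compat shim for the shared airflow.providers.common.compat.sdk re-exports. A minimal sketch of the new import path from a provider module's point of view; the operator class is hypothetical and not part of this PR, and it assumes only that common.compat.sdk re-exports BaseOperator, which is what the hunks below rely on.

# Hypothetical example; not part of this PR.
from airflow.providers.common.compat.sdk import BaseOperator


class ExampleNoOpOperator(BaseOperator):
    """Toy operator intended to run unchanged on Airflow 2.10 and 3.x."""

    def execute(self, context):
        # BaseOperator is resolved through the shared compat layer instead of
        # each provider carrying its own version_compat shim.
        self.log.info("BaseOperator resolved through common.compat.sdk")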
@@ -60,7 +60,7 @@
 from airflow.providers.amazon.aws.utils.connection_wrapper import AwsConnectionWrapper
 from airflow.providers.amazon.aws.utils.identifiers import generate_uuid
 from airflow.providers.amazon.aws.utils.suppress import return_on_error
-from airflow.providers.amazon.version_compat import BaseHook
+from airflow.providers.common.compat.sdk import BaseHook
 from airflow.providers_manager import ProvidersManager
 from airflow.utils.helpers import exactly_one
 from airflow.utils.log.logging_mixin import LoggingMixin
@@ -26,7 +26,7 @@

 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.utils.sagemaker_unified_studio import is_local_runner
-from airflow.providers.amazon.version_compat import BaseHook
+from airflow.providers.common.compat.sdk import BaseHook


 class SageMakerNotebookHook(BaseHook):
@@ -20,7 +20,7 @@
 from typing import TYPE_CHECKING, ClassVar

 from airflow.providers.amazon.aws.utils.suppress import return_on_error
-from airflow.providers.amazon.version_compat import BaseOperatorLink, XCom
+from airflow.providers.common.compat.sdk import BaseOperatorLink, XCom

 if TYPE_CHECKING:
     from airflow.models import BaseOperator
@@ -25,7 +25,7 @@
     AwsHookType,
     aws_template_fields,
 )
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.utils.types import NOTSET, ArgNotSet


@@ -33,7 +33,7 @@
 from airflow.providers.amazon.aws.triggers.sagemaker_unified_studio import (
     SageMakerNotebookJobTrigger,
 )
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator

 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -25,7 +25,7 @@
     AwsHookType,
     aws_template_fields,
 )
-from airflow.providers.amazon.version_compat import BaseSensorOperator
+from airflow.providers.common.compat.sdk import BaseSensorOperator
 from airflow.utils.types import NOTSET, ArgNotSet


@@ -36,7 +36,7 @@
 from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
 from airflow.providers.amazon.aws.triggers.s3 import S3KeysUnchangedTrigger, S3KeyTrigger
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
-from airflow.providers.amazon.version_compat import poke_mode_only
+from airflow.providers.common.compat.sdk import poke_mode_only


 class S3KeySensor(AwsBaseSensor[S3Hook]):
@@ -25,7 +25,7 @@
 from airflow.providers.amazon.aws.hooks.sagemaker_unified_studio import (
     SageMakerNotebookHook,
 )
-from airflow.providers.amazon.version_compat import BaseSensorOperator
+from airflow.providers.common.compat.sdk import BaseSensorOperator

 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -23,7 +23,7 @@
 from typing import TYPE_CHECKING

 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator

 try:
     from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
@@ -22,7 +22,7 @@
 from collections.abc import Sequence

 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.utils.types import NOTSET, ArgNotSet


@@ -24,7 +24,7 @@
 from typing import TYPE_CHECKING

 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.exasol.hooks.exasol import ExasolHook

 if TYPE_CHECKING:
@@ -22,7 +22,7 @@
 from typing import TYPE_CHECKING

 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.ftp.hooks.ftp import FTPHook

 if TYPE_CHECKING:
@@ -27,7 +27,7 @@

 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.google.cloud.hooks.gcs import GCSHook

 if TYPE_CHECKING:
@@ -22,7 +22,7 @@
 from typing import TYPE_CHECKING

 from airflow.providers.amazon.aws.hooks.glacier import GlacierHook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.google.cloud.hooks.gcs import GCSHook

 if TYPE_CHECKING:
@@ -26,14 +26,11 @@

 from airflow.models.xcom import XCOM_RETURN_KEY
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.google.common.hooks.discovery_api import GoogleDiscoveryApiHook

 if TYPE_CHECKING:
-    try:
-        from airflow.sdk.types import RuntimeTaskInstanceProtocol
-    except ImportError:
-        from airflow.models import TaskInstance as RuntimeTaskInstanceProtocol  # type: ignore[assignment]
+    from airflow.providers.common.compat.sdk import RuntimeTaskInstanceProtocol
     from airflow.utils.context import Context

 # MAX XCOM Size is 48KB
@@ -24,8 +24,8 @@
 from typing import TYPE_CHECKING, Literal

 from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
-from airflow.providers.amazon.version_compat import BaseOperator
 from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
+from airflow.providers.common.compat.sdk import BaseOperator

 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -23,7 +23,7 @@
 from typing import TYPE_CHECKING, Any

 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.http.hooks.http import HttpHook

 if TYPE_CHECKING:
@@ -23,7 +23,7 @@
 from typing import TYPE_CHECKING

 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.imap.hooks.imap import ImapHook

 if TYPE_CHECKING:
@@ -21,7 +21,7 @@
 from typing import TYPE_CHECKING

 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator

 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -24,7 +24,7 @@
 from bson import json_util

 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.mongo.hooks.mongo import MongoHook

 if TYPE_CHECKING:
@@ -28,7 +28,7 @@
 from airflow.providers.amazon.aws.hooks.redshift_sql import RedshiftSQLHook
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.aws.utils.redshift import build_credentials_block
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.utils.types import NOTSET, ArgNotSet

 if TYPE_CHECKING:
@@ -24,7 +24,7 @@

 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator

 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -22,7 +22,7 @@
 from typing import TYPE_CHECKING

 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.ftp.hooks.ftp import FTPHook

 if TYPE_CHECKING:
@@ -24,7 +24,7 @@
 from airflow.providers.amazon.aws.hooks.redshift_sql import RedshiftSQLHook
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.aws.utils.redshift import build_credentials_block
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.utils.types import NOTSET, ArgNotSet

 if TYPE_CHECKING:
@@ -23,7 +23,7 @@
 from urllib.parse import urlsplit

 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.ssh.hooks.ssh import SSHHook

 if TYPE_CHECKING:
@@ -23,7 +23,7 @@

 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseHook, BaseOperator
+from airflow.providers.common.compat.sdk import BaseHook, BaseOperator

 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -22,7 +22,7 @@
 from typing import TYPE_CHECKING

 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.salesforce.hooks.salesforce import SalesforceHook

 if TYPE_CHECKING:
@@ -23,7 +23,7 @@
 from urllib.parse import urlsplit

 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.ssh.hooks.ssh import SSHHook

 if TYPE_CHECKING:
@@ -27,7 +27,7 @@

 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.version_compat import BaseHook, BaseOperator
+from airflow.providers.common.compat.sdk import BaseHook, BaseOperator

 if TYPE_CHECKING:
     import pandas as pd
@@ -32,10 +32,7 @@
 from airflow.utils.types import NOTSET, ArgNotSet

 if TYPE_CHECKING:
-    try:
-        from airflow.sdk import Connection
-    except ImportError:
-        from airflow.models.connection import Connection  # type: ignore[assignment]
+    from airflow.providers.common.compat.sdk import Connection


 @dataclass
@@ -36,28 +36,8 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
 AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
 AIRFLOW_V_3_1_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 1)

-if AIRFLOW_V_3_1_PLUS:
-    from airflow.sdk import BaseHook
-    from airflow.sdk.bases.sensor import poke_mode_only
-else:
-    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
-    from airflow.sensors.base import poke_mode_only  # type: ignore[no-redef]
-
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import BaseOperator, BaseOperatorLink, BaseSensorOperator
-    from airflow.sdk.execution_time.xcom import XCom
-else:
-    from airflow.models import BaseOperator, XCom
-    from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]
-    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
-
 __all__ = [
     "AIRFLOW_V_3_0_PLUS",
     "AIRFLOW_V_3_1_PLUS",
-    "BaseHook",
-    "BaseOperator",
-    "BaseOperatorLink",
-    "BaseSensorOperator",
-    "poke_mode_only",
-    "XCom",
     "AIRFLOW_V_3_1_1_PLUS",
 ]
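For reference, the block deleted above is the per-provider version gate that picked import locations at runtime. A condensed sketch of that pattern follows; the version detection line is an assumption (the original calls get_base_airflow_version_tuple(), whose body sits outside this hunk), and the working assumption of this PR is that common.compat.sdk now performs the equivalent selection in one shared place.

# Condensed sketch of the removed gate; the version check below is an assumption.
from airflow import __version__ as AIRFLOW_VERSION
from packaging.version import Version

AIRFLOW_V_3_0_PLUS = Version(Version(AIRFLOW_VERSION).base_version) >= Version("3.0.0")

if AIRFLOW_V_3_0_PLUS:
    # Airflow 3.x: task-facing classes come from the Task SDK.
    from airflow.sdk import BaseOperator, BaseOperatorLink, BaseSensorOperator
    from airflow.sdk.execution_time.xcom import XCom
else:
    # Airflow 2.10: fall back to the legacy model locations.
    from airflow.models import BaseOperator, XCom
    from airflow.models.baseoperatorlink import BaseOperatorLink
    from airflow.sensors.base import BaseSensorOperator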
15 changes: 1 addition & 14 deletions providers/amazon/tests/system/amazon/aws/example_appflow.py
@@ -17,7 +17,6 @@
 from __future__ import annotations

 from datetime import datetime
-from typing import TYPE_CHECKING

 from airflow.providers.amazon.aws.operators.appflow import (
     AppflowRecordsShortCircuitOperator,
@@ -26,21 +25,9 @@
     AppflowRunDailyOperator,
     AppflowRunFullOperator,
 )
+from airflow.providers.common.compat.sdk import DAG, chain
 from airflow.providers.standard.operators.bash import BashOperator

-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
-
 from system.amazon.aws.utils import SystemTestContextBuilder

 sys_test_context_task = SystemTestContextBuilder().build()
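With the TYPE_CHECKING / AIRFLOW_V_3_0_PLUS gate removed, the system tests import DAG and chain straight from the compat module. A stripped-down usage sketch follows; the DAG id, tasks, and commands are hypothetical and not the actual example_appflow pipeline.

from datetime import datetime

from airflow.providers.common.compat.sdk import DAG, chain
from airflow.providers.standard.operators.bash import BashOperator

with DAG(
    dag_id="compat_sdk_import_demo",  # hypothetical DAG id
    schedule=None,
    start_date=datetime(2024, 1, 1),
    catchup=False,
):
    first = BashOperator(task_id="first", bash_command="echo first")
    second = BashOperator(task_id="second", bash_command="echo second")

    # chain() wires first >> second, the same way the system tests chain their steps.
    chain(first, second)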
@@ -17,23 +17,11 @@
 from __future__ import annotations

 from datetime import datetime
-from typing import TYPE_CHECKING

 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.transfers.azure_blob_to_s3 import AzureBlobStorageToS3Operator
+from airflow.providers.common.compat.sdk import DAG, chain

-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import chain
-    from airflow.models.dag import DAG
-else:
-    if AIRFLOW_V_3_0_PLUS:
-        from airflow.sdk import DAG, chain
-    else:
-        # Airflow 2.10 compat
-        from airflow.models.baseoperator import chain
-        from airflow.models.dag import DAG
 try:
     from airflow.sdk import TriggerRule
 except ImportError: