@@ -19,8 +19,8 @@
 from collections.abc import Iterable, Sequence
 from typing import IO, TYPE_CHECKING, Any, AnyStr
 
-from airflow.models import BaseOperator
 from airflow.providers.microsoft.azure.hooks.data_lake import AzureDataLakeHook, AzureDataLakeStorageV2Hook
+from airflow.providers.microsoft.azure.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context

@@ -24,8 +24,8 @@
 from typing import TYPE_CHECKING
 
 from airflow.configuration import conf
-from airflow.models import BaseOperator
 from airflow.providers.microsoft.azure.hooks.adx import AzureDataExplorerHook
+from airflow.providers.microsoft.azure.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from azure.kusto.data._models import KustoResultTable

@@ -20,8 +20,8 @@
 from typing import TYPE_CHECKING, Any
 from uuid import UUID
 
-from airflow.models import BaseOperator
 from airflow.providers.microsoft.azure.hooks.asb import AdminClientHook, MessageHook
+from airflow.providers.microsoft.azure.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     import datetime

@@ -24,8 +24,8 @@
 from azure.batch import models as batch_models
 
 from airflow.exceptions import AirflowException
-from airflow.models import BaseOperator
 from airflow.providers.microsoft.azure.hooks.batch import AzureBatchHook
+from airflow.providers.microsoft.azure.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context

@@ -40,10 +40,10 @@
 from msrestazure.azure_exceptions import CloudError
 
 from airflow.exceptions import AirflowException, AirflowTaskTimeout
-from airflow.models import BaseOperator
 from airflow.providers.microsoft.azure.hooks.container_instance import AzureContainerInstanceHook
 from airflow.providers.microsoft.azure.hooks.container_registry import AzureContainerRegistryHook
 from airflow.providers.microsoft.azure.hooks.container_volume import AzureContainerVolumeHook
+from airflow.providers.microsoft.azure.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context

@@ -20,8 +20,8 @@
 from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
-from airflow.models import BaseOperator
 from airflow.providers.microsoft.azure.hooks.cosmos import AzureCosmosDBHook
+from airflow.providers.microsoft.azure.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context

@@ -25,14 +25,14 @@
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
 from airflow.hooks.base import BaseHook
-from airflow.models import BaseOperator
 from airflow.providers.microsoft.azure.hooks.data_factory import (
     AzureDataFactoryHook,
     AzureDataFactoryPipelineRunException,
     AzureDataFactoryPipelineRunStatus,
     get_field,
 )
 from airflow.providers.microsoft.azure.triggers.data_factory import AzureDataFactoryTrigger
+from airflow.providers.microsoft.azure.version_compat import BaseOperator
 from airflow.utils.log.logging_mixin import LoggingMixin
 
 if TYPE_CHECKING:

@@ -27,12 +27,12 @@
 )
 
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, TaskDeferred
-from airflow.models import BaseOperator
 from airflow.providers.microsoft.azure.hooks.msgraph import KiotaRequestAdapterHook
 from airflow.providers.microsoft.azure.triggers.msgraph import (
     MSGraphTrigger,
     ResponseSerializer,
 )
+from airflow.providers.microsoft.azure.version_compat import BaseOperator
 from airflow.utils.xcom import XCOM_RETURN_KEY
 
 if TYPE_CHECKING:

@@ -307,7 +307,7 @@ def push_xcom(self, context: Any, value) -> None:
             self.key,
             value,
         )
-        self.xcom_push(context=context, key=self.key, value=value)
+        context["ti"].xcom_push(key=self.key, value=value)
 
     @staticmethod
     def paginate(

@@ -21,13 +21,13 @@
 from typing import TYPE_CHECKING, Any
 
 from airflow.exceptions import AirflowException
-from airflow.models import BaseOperator
 from airflow.providers.microsoft.azure.hooks.powerbi import PowerBIHook
 from airflow.providers.microsoft.azure.triggers.powerbi import (
     PowerBIDatasetListTrigger,
     PowerBITrigger,
     PowerBIWorkspaceListTrigger,
 )
+from airflow.providers.microsoft.azure.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from msgraph_core import APIVersion

@@ -141,8 +141,7 @@ def get_refresh_status(self, context: Context, event: dict[str, str] | None = No
             dataset_refresh_id = event["dataset_refresh_id"]
 
         if dataset_refresh_id:
-            self.xcom_push(
-                context=context,
+            context["ti"].xcom_push(
                 key=f"{self.task_id}.powerbi_dataset_refresh_Id",
                 value=dataset_refresh_id,
             )

@@ -168,8 +167,7 @@ def execute_complete(self, context: Context, event: dict[str, str]) -> Any:
         Relies on trigger to throw an exception, otherwise it assumes execution was successful.
         """
         if event:
-            self.xcom_push(
-                context=context,
+            context["ti"].xcom_push(
                 key=f"{self.task_id}.powerbi_dataset_refresh_status",
                 value=event["dataset_refresh_status"],
             )

@@ -235,8 +233,7 @@ def execute_complete(self, context: Context, event: dict[str, str]) -> Any:
         Relies on trigger to throw an exception, otherwise it assumes execution was successful.
         """
         if event:
-            self.xcom_push(
-                context=context,
+            context["ti"].xcom_push(
                 key=f"{self.task_id}.powerbi_workspace_ids",
                 value=event["workspace_ids"],
             )

@@ -306,8 +303,7 @@ def execute_complete(self, context: Context, event: dict[str, str]) -> Any:
         Relies on trigger to throw an exception, otherwise it assumes execution was successful.
        """
         if event:
-            self.xcom_push(
-                context=context,
+            context["ti"].xcom_push(
                 key=f"{self.task_id}.powerbi_dataset_ids",
                 value=event["dataset_ids"],
             )
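
Note on the xcom_push changes in the MSGraph and Power BI operators above: self.xcom_push(context=context, ...) is a helper on the Airflow 2 BaseOperator that the Airflow 3 Task SDK no longer carries, while the task instance available as context["ti"] exposes xcom_push on both versions. A minimal sketch of the version-agnostic pattern (the operator class and payload here are hypothetical, not part of this PR):

    from airflow.providers.microsoft.azure.version_compat import BaseOperator

    class ExampleXComOperator(BaseOperator):
        def execute(self, context):
            payload = {"status": "ok"}  # hypothetical value to publish
            # Push through the task instance carried in the execution context
            # rather than the operator-level helper; this form works on
            # Airflow 2 and Airflow 3 alike.
            context["ti"].xcom_push(key="example_key", value=payload)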

@@ -23,14 +23,14 @@
 
 from airflow.exceptions import AirflowException
 from airflow.hooks.base import BaseHook
-from airflow.models import BaseOperator
 from airflow.providers.microsoft.azure.hooks.synapse import (
     AzureSynapseHook,
     AzureSynapsePipelineHook,
     AzureSynapsePipelineRunException,
     AzureSynapsePipelineRunStatus,
     AzureSynapseSparkBatchRunStatus,
 )
+from airflow.providers.microsoft.azure.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from azure.synapse.spark.models import SparkBatchJobOptions

@@ -20,8 +20,8 @@
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Any
 
-from airflow.models import BaseOperator
 from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
+from airflow.providers.microsoft.azure.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context

@@ -20,8 +20,8 @@
 from typing import TYPE_CHECKING, Any
 
 from airflow.exceptions import AirflowException
-from airflow.models import BaseOperator
 from airflow.providers.microsoft.azure.hooks.data_lake import AzureDataLakeHook
+from airflow.providers.microsoft.azure.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context

@@ -20,8 +20,8 @@
 from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
-from airflow.models import BaseOperator
 from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
+from airflow.providers.microsoft.azure.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context

@@ -23,8 +23,8 @@
 from tempfile import TemporaryDirectory
 from typing import TYPE_CHECKING, Any
 
-from airflow.models import BaseOperator
 from airflow.providers.microsoft.azure.hooks.data_lake import AzureDataLakeHook
+from airflow.providers.microsoft.azure.version_compat import BaseOperator
 from airflow.providers.oracle.hooks.oracle import OracleHook
 
 if TYPE_CHECKING:

@@ -22,9 +22,9 @@
 from functools import cached_property
 from typing import TYPE_CHECKING
 
-from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
+from airflow.providers.microsoft.azure.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context

@@ -32,6 +32,7 @@
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
 from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
+from airflow.providers.microsoft.azure.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.providers.sftp.hooks.sftp import SFTPHook
 
 WILDCARD = "*"

@@ -98,6 +99,8 @@ def __init__(
         self.create_container = create_container
 
     def dry_run(self) -> None:
+        if AIRFLOW_V_3_0_PLUS:
+            raise NotImplementedError("Not implemented for Airflow 3.")
         super().dry_run()
         sftp_files: list[SftpFile] = self.get_sftp_files_map()
         for file in sftp_files:
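
The guard above makes dry_run fail fast on Airflow 3, where the BaseOperator.dry_run it delegates to is an Airflow 2 facility (as written, the condition read "if not AIRFLOW_V_3_0_PLUS", which contradicted the message; it is corrected here). A minimal standalone sketch of the same guard pattern, with a hypothetical operator class:

    from airflow.models import BaseOperator
    from airflow.providers.microsoft.azure.version_compat import AIRFLOW_V_3_0_PLUS

    class ExampleTransferOperator(BaseOperator):
        """Hypothetical operator illustrating the dry_run guard in this diff."""

        def dry_run(self) -> None:
            if AIRFLOW_V_3_0_PLUS:
                # No usable parent dry_run on Airflow 3, so raise rather
                # than fall through to a missing Airflow 2 implementation.
                raise NotImplementedError("Not implemented for Airflow 3.")
            super().dry_run()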

@@ -33,3 +33,20 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
 
 
 AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import (
+        BaseOperator,
+        BaseOperatorLink,
+        BaseSensorOperator,
+    )
+else:
+    from airflow.models import BaseOperator, BaseOperatorLink  # type: ignore[no-redef]
+    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
+
+__all__ = [
+    "AIRFLOW_V_3_0_PLUS",
+    "BaseOperator",
+    "BaseOperatorLink",
+    "BaseSensorOperator",
+]
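
With this shim, every module in the provider imports the base classes from a single place and the version branching stays out of the operators themselves. A minimal sketch of a consumer (the operator class is hypothetical; the import path is the one introduced in this PR):

    from airflow.providers.microsoft.azure.version_compat import BaseOperator

    class HelloAzureOperator(BaseOperator):
        # Resolves to airflow.sdk.BaseOperator on Airflow >= 3.0.0 and to
        # airflow.models.BaseOperator on earlier versions.
        def execute(self, context):
            self.log.info("Compatible with both Airflow major versions")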