Merged
@@ -46,5 +46,6 @@
except (ImportError, ModuleNotFoundError):
from airflow.providers.standard.operators.python import get_current_context

from airflow.providers.common.compat.version_compat import BaseOperator

__all__ = ["PythonOperator", "_SERIALIZERS", "ShortCircuitOperator", "get_current_context"]
__all__ = ["BaseOperator", "PythonOperator", "_SERIALIZERS", "ShortCircuitOperator", "get_current_context"]
@@ -33,3 +33,15 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:


AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
AIRFLOW_V_3_1_PLUS = get_base_airflow_version_tuple() >= (3, 1, 0)

if AIRFLOW_V_3_0_PLUS:
from airflow.sdk import BaseOperator
else:
from airflow.models import BaseOperator

__all__ = [
"AIRFLOW_V_3_0_PLUS",
"AIRFLOW_V_3_1_PLUS",
"BaseOperator",
]
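
For context: with this `version_compat` gate in place, provider modules import `BaseOperator` from a single location and get `airflow.sdk.BaseOperator` on Airflow 3 and `airflow.models.BaseOperator` on Airflow 2. Centralizing the gate also drops the per-module `# type: ignore[no-redef]` pragmas that the inline imports needed (see the FileTransferOperator hunk below). A minimal usage sketch; the operator itself is hypothetical, not part of this PR:

```python
# Hypothetical operator built on the shim; runs on Airflow 2.x and 3.x alike.
from airflow.providers.common.compat.version_compat import BaseOperator


class EchoOperator(BaseOperator):
    """Logs and returns a fixed message on any supported Airflow version."""

    def __init__(self, message: str, **kwargs):
        super().__init__(**kwargs)
        self.message = message

    def execute(self, context):
        self.log.info("Message: %s", self.message)
        return self.message
```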
@@ -20,18 +20,16 @@
from collections.abc import Sequence
from typing import TYPE_CHECKING

from airflow.providers.common.io.version_compat import AIRFLOW_V_3_0_PLUS
from airflow.providers.common.io.version_compat import AIRFLOW_V_3_0_PLUS, BaseOperator

if TYPE_CHECKING:
from airflow.providers.openlineage.extractors import OperatorLineage
from airflow.sdk import Context

if AIRFLOW_V_3_0_PLUS:
from airflow.sdk import ObjectStoragePath
from airflow.sdk.bases.operator import BaseOperator
else:
from airflow.io.path import ObjectStoragePath # type: ignore[no-redef]
from airflow.models import BaseOperator # type: ignore[no-redef]


class FileTransferOperator(BaseOperator):
@@ -33,3 +33,13 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:


AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)

if AIRFLOW_V_3_0_PLUS:
from airflow.sdk import BaseOperator
else:
from airflow.models import BaseOperator

__all__ = [
"AIRFLOW_V_3_0_PLUS",
"BaseOperator",
]
@@ -23,9 +23,9 @@

from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
from airflow.models import BaseOperator
from airflow.providers.common.sql.hooks.sql import DbApiHook
from airflow.providers.common.sql.triggers.sql import SQLExecuteQueryTrigger
from airflow.providers.common.sql.version_compat import BaseOperator

if TYPE_CHECKING:
import jinja2
@@ -192,7 +192,7 @@ def execute_complete(
)

self.log.info("Offset increased to %d", offset)
self.xcom_push(context=context, key="offset", value=offset)
context["ti"].xcom_push(key="offset", value=offset)

self.log.info("Inserting %d rows into %s", len(results), self.destination_conn_id)
self.destination_hook.insert_rows(
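
The `xcom_push` change above swaps the operator-level helper for the task instance carried in the execution context, presumably because `self.xcom_push` is not available on the Airflow 3 Task SDK `BaseOperator`, while `context["ti"].xcom_push` works on both 2.x and 3.x. A minimal sketch of the pattern; the operator and key names are illustrative:

```python
from airflow.providers.common.sql.version_compat import BaseOperator


class OffsetTrackerOperator(BaseOperator):
    """Illustrative operator pushing a value via the context's TaskInstance."""

    def execute(self, context):
        offset = 100  # stand-in for a computed offset
        # Push through the TaskInstance rather than self.xcom_push, which
        # this PR suggests is gone from the Airflow 3 Task SDK BaseOperator.
        context["ti"].xcom_push(key="offset", value=offset)
        return offset
```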
@@ -25,9 +25,10 @@

from airflow.exceptions import AirflowException, AirflowFailException
from airflow.hooks.base import BaseHook
from airflow.models import BaseOperator, SkipMixin
from airflow.models import SkipMixin
from airflow.providers.common.sql.hooks.handlers import fetch_all_handler, return_single_query_results
from airflow.providers.common.sql.hooks.sql import DbApiHook
from airflow.providers.common.sql.version_compat import BaseOperator
from airflow.utils.helpers import merge_dicts

if TYPE_CHECKING:
@@ -33,3 +33,15 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:


AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)

if AIRFLOW_V_3_0_PLUS:
from airflow.sdk import BaseOperator, BaseSensorOperator
else:
from airflow.models import BaseOperator
from airflow.sensors.base import BaseSensorOperator # type: ignore[no-redef]

__all__ = [
"AIRFLOW_V_3_0_PLUS",
"BaseOperator",
"BaseSensorOperator",
]
@@ -55,11 +55,6 @@

@pytest.mark.backend("mysql")
class TestMySql:
def setup_method(self):
args = {"owner": "airflow", "start_date": DEFAULT_DATE}
dag = DAG(TEST_DAG_ID, schedule=None, default_args=args)
self.dag = dag

def teardown_method(self):
from airflow.providers.mysql.hooks.mysql import MySqlHook

@@ -77,7 +72,7 @@ def teardown_method(self):
"mysql-connector-python",
],
)
def test_mysql_to_mysql(self, client):
def test_mysql_to_mysql(self, client, dag_maker):
class MySqlContext:
def __init__(self, client):
self.client = client
@@ -92,6 +87,25 @@ def __exit__(self, exc_type, exc_val, exc_tb):

with MySqlContext(client):
sql = "SELECT * FROM connection;"
with dag_maker(f"TEST_DAG_ID_{client}", start_date=DEFAULT_DATE):
op = GenericTransfer(
task_id="test_m2m",
preoperator=[
"DROP TABLE IF EXISTS test_mysql_to_mysql",
"CREATE TABLE IF NOT EXISTS test_mysql_to_mysql LIKE connection",
],
source_conn_id="airflow_db",
destination_conn_id="airflow_db",
destination_table="test_mysql_to_mysql",
sql=sql,
)

dag_maker.run_ti(op.task_id)

@mock.patch("airflow.providers.common.sql.hooks.sql.DbApiHook.insert_rows")
def test_mysql_to_mysql_replace(self, mock_insert, dag_maker):
sql = "SELECT * FROM connection LIMIT 10;"
with dag_maker("TEST_DAG_ID", start_date=DEFAULT_DATE):
op = GenericTransfer(
task_id="test_m2m",
preoperator=[
@@ -102,27 +116,10 @@ def __exit__(self, exc_type, exc_val, exc_tb):
destination_conn_id="airflow_db",
destination_table="test_mysql_to_mysql",
sql=sql,
dag=self.dag,
insert_args={"replace": True},
)
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)

@mock.patch("airflow.providers.common.sql.hooks.sql.DbApiHook.insert_rows")
def test_mysql_to_mysql_replace(self, mock_insert):
sql = "SELECT * FROM connection LIMIT 10;"
op = GenericTransfer(
task_id="test_m2m",
preoperator=[
"DROP TABLE IF EXISTS test_mysql_to_mysql",
"CREATE TABLE IF NOT EXISTS test_mysql_to_mysql LIKE connection",
],
source_conn_id="airflow_db",
destination_conn_id="airflow_db",
destination_table="test_mysql_to_mysql",
sql=sql,
dag=self.dag,
insert_args={"replace": True},
)
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
dag_maker.run_ti(op.task_id)
assert mock_insert.called
_, kwargs = mock_insert.call_args
assert "replace" in kwargs
@@ -140,7 +137,7 @@ def teardown_method(self):
def test_postgres_to_postgres(self, dag_maker):
sql = "SELECT * FROM INFORMATION_SCHEMA.TABLES LIMIT 100;"
with dag_maker(default_args={"owner": "airflow", "start_date": DEFAULT_DATE}, serialized=True):
op = GenericTransfer(
_ = GenericTransfer(
task_id="test_p2p",
preoperator=[
"DROP TABLE IF EXISTS test_postgres_to_postgres",
@@ -151,14 +148,14 @@ def test_postgres_to_postgres(self, dag_maker):
destination_table="test_postgres_to_postgres",
sql=sql,
)
dag_maker.create_dagrun()
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
dr = dag_maker.create_dagrun()
dag_maker.run_ti("test_p2p", dr)

@mock.patch("airflow.providers.common.sql.hooks.sql.DbApiHook.insert_rows")
def test_postgres_to_postgres_replace(self, mock_insert, dag_maker):
sql = "SELECT id, conn_id, conn_type FROM connection LIMIT 10;"
with dag_maker(default_args={"owner": "airflow", "start_date": DEFAULT_DATE}, serialized=True):
op = GenericTransfer(
_ = GenericTransfer(
task_id="test_p2p",
preoperator=[
"DROP TABLE IF EXISTS test_postgres_to_postgres",
@@ -174,8 +171,8 @@ def test_postgres_to_postgres_replace(self, mock_insert, dag_maker):
"replace_index": "id",
},
)
dag_maker.create_dagrun()
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
dr = dag_maker.create_dagrun()
dag_maker.run_ti("test_p2p", dr)
assert mock_insert.called
_, kwargs = mock_insert.call_args
assert "replace" in kwargs
57 changes: 42 additions & 15 deletions providers/common/sql/tests/unit/common/sql/operators/test_sql.py
@@ -1095,6 +1095,30 @@ def setup_method(self):
self.branch_2 = EmptyOperator(task_id="branch_2", dag=self.dag)
self.branch_3 = None

def get_ti(self, task_id, dr=None):
if dr is None:
if AIRFLOW_V_3_0_PLUS:
dagrun_kwargs = {
"logical_date": DEFAULT_DATE,
"run_after": DEFAULT_DATE,
"triggered_by": DagRunTriggeredByType.TEST,
}
else:
dagrun_kwargs = {"execution_date": DEFAULT_DATE}
dr = self.dag.create_dagrun(
run_id=f"manual__{timezone.utcnow().isoformat()}",
run_type=DagRunType.MANUAL,
start_date=timezone.utcnow(),
state=State.RUNNING,
data_interval=(DEFAULT_DATE, DEFAULT_DATE),
**dagrun_kwargs,
)

ti = dr.get_task_instance(task_id)
ti.task = self.dag.get_task(ti.task_id)

return ti

def teardown_method(self):
with create_session() as session:
session.query(DagRun).delete()
@@ -1124,7 +1148,7 @@ def test_unsupported_conn_type(self):
)

with pytest.raises(AirflowException):
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
op.execute({})

def test_invalid_conn(self):
"""Check if BranchSQLOperator throws an exception for invalid connection"""
@@ -1138,7 +1162,7 @@ def test_invalid_conn(self):
)

with pytest.raises(AirflowException):
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
op.execute({})

def test_invalid_follow_task_true(self):
"""Check if BranchSQLOperator throws an exception for invalid connection"""
@@ -1152,7 +1176,7 @@ def test_invalid_follow_task_true(self):
)

with pytest.raises(AirflowException):
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
op.execute({})

def test_invalid_follow_task_false(self):
"""Check if BranchSQLOperator throws an exception for invalid connection"""
@@ -1166,12 +1190,13 @@ def test_invalid_follow_task_false(self):
)

with pytest.raises(AirflowException):
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
op.execute({})

@pytest.mark.backend("mysql")
def test_sql_branch_operator_mysql(self, branch_op):
"""Check if BranchSQLOperator works with backend"""
branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)

branch_op.execute({"ti": mock.MagicMock(task=branch_op)})

@pytest.mark.backend("postgres")
def test_sql_branch_operator_postgres(self):
@@ -1184,7 +1209,7 @@ def test_sql_branch_operator_postgres(self):
follow_task_ids_if_false=["branch_2"],
dag=self.dag,
)
branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
self.get_ti(branch_op.task_id).run()

@mock.patch("airflow.providers.common.sql.operators.sql.BaseSQLOperator.get_db_hook")
def test_branch_single_value_with_dag_run(self, mock_get_db_hook, branch_op):
@@ -1223,8 +1248,9 @@ def test_branch_single_value_with_dag_run(self, mock_get_db_hook, branch_op):

assert exc_info.value.tasks == [("branch_2", -1)]
else:
branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
self.get_ti(branch_op.task_id, dr).run()
tis = dr.get_task_instances()

for ti in tis:
if ti.task_id == "make_choice":
assert ti.state == State.SUCCESS
@@ -1267,11 +1293,11 @@ def test_branch_true_with_dag_run(self, mock_get_db_hook, true_value, branch_op)
from airflow.exceptions import DownstreamTasksSkipped

with pytest.raises(DownstreamTasksSkipped) as exc_info:
branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
branch_op.execute({})

assert exc_info.value.tasks == [("branch_2", -1)]
else:
branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
self.get_ti(branch_op.task_id, dr).run()
tis = dr.get_task_instances()
for ti in tis:
if ti.task_id == "make_choice":
@@ -1315,11 +1341,12 @@ def test_branch_false_with_dag_run(self, mock_get_db_hook, false_value, branch_o
from airflow.exceptions import DownstreamTasksSkipped

with pytest.raises(DownstreamTasksSkipped) as exc_info:
branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
branch_op.execute({})
assert exc_info.value.tasks == [("branch_1", -1)]
else:
branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
self.get_ti(branch_op.task_id, dr).run()
tis = dr.get_task_instances()

for ti in tis:
if ti.task_id == "make_choice":
assert ti.state == State.SUCCESS
@@ -1375,7 +1402,7 @@ def test_branch_list_with_dag_run(self, mock_get_db_hook):
branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
assert exc_info.value.tasks == [("branch_3", -1)]
else:
branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
self.get_ti(branch_op.task_id, dr).run()
tis = dr.get_task_instances()
for ti in tis:
if ti.task_id == "make_choice":
@@ -1416,7 +1443,7 @@ def test_invalid_query_result_with_dag_run(self, mock_get_db_hook, branch_op):
mock_get_records.return_value = ["Invalid Value"]

with pytest.raises(AirflowException):
branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
branch_op.execute({})

@mock.patch("airflow.providers.common.sql.operators.sql.BaseSQLOperator.get_db_hook")
def test_with_skip_in_branch_downstream_dependencies(self, mock_get_db_hook, branch_op):
Expand Down Expand Up @@ -1447,7 +1474,7 @@ def test_with_skip_in_branch_downstream_dependencies(self, mock_get_db_hook, bra
for true_value in SUPPORTED_TRUE_VALUES:
mock_get_records.return_value = [true_value]

branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
self.get_ti(branch_op.task_id, dr).run()

tis = dr.get_task_instances()
for ti in tis:
@@ -1493,7 +1520,7 @@ def test_with_skip_in_branch_downstream_dependencies2(self, mock_get_db_hook, fa
branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
assert exc_info.value.tasks == [("branch_1", -1)]
else:
branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
self.get_ti(branch_op.task_id, dr).run()
tis = dr.get_task_instances()
for ti in tis:
if ti.task_id == "make_choice":