Commit
Merge branch 'azure-remove-deprecations' of github.com:astronomer/airflow into azure-remove-deprecations
vatsrahul1001 committed Dec 7, 2024
2 parents da757d7 + 2c35fda commit d0e3241
Showing 89 changed files with 563 additions and 1,937 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -675,7 +675,7 @@ repos:
^contributing-docs/03_contributors_quick_start.rst$|
^.*\.(png|gif|jp[e]?g|tgz|lock)$|
git|
^newsfragments/43368\.significant\.rst$
^newsfragments/43349\.significant\.rst$
- id: check-base-operator-partial-arguments
name: Check BaseOperator and partial() arguments
language: python
39 changes: 30 additions & 9 deletions airflow/datasets/__init__.py
@@ -24,17 +24,38 @@
# lib.) This is required by some IDEs to resolve the import paths.
from __future__ import annotations

import importlib
import warnings

from airflow.sdk.definitions.asset import AssetAlias as DatasetAlias, Dataset

# TODO: Remove this module in Airflow 3.2

warnings.warn(
"Import from the airflow.dataset module is deprecated and "
"will be removed in the Airflow 3.2. Please import it from 'airflow.sdk.definitions.asset'.",
DeprecationWarning,
stacklevel=2,
)
_names_moved = {
"DatasetAlias": ("airflow.sdk.definitions.asset", "AssetAlias"),
"DatasetAll": ("airflow.sdk.definitions.asset", "AssetAll"),
"DatasetAny": ("airflow.sdk.definitions.asset", "DatasetAny"),
"Dataset": ("airflow.sdk.definitions.asset", "Asset"),
"expand_alias_to_datasets": ("airflow.models.asset", "expand_alias_to_assets"),
}


def __getattr__(name: str):
# PEP-562: Lazy loaded attributes on python modules
if name not in _names_moved:
raise AttributeError(f"module {__name__!r} has no attribute {name!r}")

module_path, new_name = _names_moved[name]
warnings.warn(
f"Import 'airflow.dataset.{name}' is deprecated and "
f"will be removed in the Airflow 3.2. Please import it from '{module_path}.{new_name}'.",
DeprecationWarning,
stacklevel=2,
)
mod = importlib.import_module(module_path, __name__)
val = getattr(mod, new_name)

# Store for next time
globals()[name] = val
return val


__all__ = ["Dataset", "DatasetAlias"]
__all__ = ["Dataset", "DatasetAlias", "DatasetAll", "DatasetAny", "expand_alias_to_datasets"]
21 changes: 21 additions & 0 deletions airflow/decorators/base.py
@@ -398,6 +398,12 @@ def _validate_arg_names(self, func: ValidationSource, kwargs: dict[str, Any]):
super()._validate_arg_names(func, kwargs)

def expand(self, **map_kwargs: OperatorExpandArgument) -> XComArg:
if self.kwargs.get("trigger_rule") == TriggerRule.ALWAYS and any(
[isinstance(expanded, XComArg) for expanded in map_kwargs.values()]
):
raise ValueError(
"Task-generated mapping within a task using 'expand' is not allowed with trigger rule 'always'."
)
if not map_kwargs:
raise TypeError("no arguments to expand against")
self._validate_arg_names("expand", map_kwargs)
@@ -411,6 +417,21 @@ def expand(self, **map_kwargs: OperatorExpandArgument) -> XComArg:
return self._expand(DictOfListsExpandInput(map_kwargs), strict=False)

def expand_kwargs(self, kwargs: OperatorExpandKwargsArgument, *, strict: bool = True) -> XComArg:
if (
self.kwargs.get("trigger_rule") == TriggerRule.ALWAYS
and not isinstance(kwargs, XComArg)
and any(
[
isinstance(v, XComArg)
for kwarg in kwargs
if not isinstance(kwarg, XComArg)
for v in kwarg.values()
]
)
):
raise ValueError(
"Task-generated mapping within a task using 'expand_kwargs' is not allowed with trigger rule 'always'."
)
if isinstance(kwargs, Sequence):
for item in kwargs:
if not isinstance(item, (XComArg, Mapping)):
3 changes: 1 addition & 2 deletions airflow/ui/src/layouts/Nav/Nav.tsx
@@ -20,7 +20,6 @@ import { Box, Flex, VStack, Link } from "@chakra-ui/react";
import { FiCornerUpLeft, FiDatabase, FiHome, FiSettings } from "react-icons/fi";

import { useVersionServiceGetVersion } from "openapi/queries";

import { AirflowPin } from "src/assets/AirflowPin";
import { DagIcon } from "src/assets/DagIcon";

@@ -89,4 +88,4 @@ export const Nav = () => {
</Flex>
</VStack>
);
}
};
12 changes: 6 additions & 6 deletions contributing-docs/testing/unit_tests.rst
@@ -338,10 +338,10 @@ If your test accesses the database but is not marked properly the Non-DB test in
How to verify if DB test is correctly classified
................................................

When you add if you want to see if your DB test is correctly classified, you can run the test or group
If you want to see if your DB test is correctly classified, you can run the test or group
of tests with ``--skip-db-tests`` flag.

You can run the all (or subset of) test types if you want to make sure all ot the problems are fixed
You can run the all (or subset of) test types if you want to make sure all of the problems are fixed

.. code-block:: bash
@@ -458,8 +458,8 @@ Do this:
Problems with Non-DB test collection
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Sometimes, even if whole module is marked as ``@pytest.mark.db_test`` even parsing the file and collecting
tests will fail when ``--skip-db-tests`` is used because some of the imports od objects created in the
Sometimes, even if the whole module is marked as ``@pytest.mark.db_test``, parsing the file and collecting
tests will fail when ``--skip-db-tests`` is used because some of the imports or objects created in the
module will read the database.

Usually what helps is to move such initialization code to inside the tests or pytest fixtures (and pass
@@ -1086,9 +1086,9 @@ directly to the container.
Implementing compatibility for provider tests for older Airflow versions
........................................................................

When you implement tests for providers, you should make sure that they are compatible with older
When you implement tests for providers, you should make sure that they are compatible with older Airflow versions.

Note that some of the tests if written without taking care about the compatibility, might not work with older
Note that some of the tests, if written without taking care about the compatibility, might not work with older
versions of Airflow - this is because of refactorings, renames, and tests relying on internals of Airflow that
are not part of the public API. We deal with it in one of the following ways:

4 changes: 2 additions & 2 deletions docs/apache-airflow-providers-microsoft-mssql/operators.rst
@@ -25,8 +25,8 @@ The purpose of this guide is to define tasks involving interactions with the MSS
Use the :class:`SQLExecuteQueryOperator <airflow.providers.common.sql.operators.sql>` to execute
SQL commands in MSSQL database.

.. warning::
Previously, MsSqlOperator was used to perform this kind of operation. But at the moment MsSqlOperator is deprecated and will be removed in future versions of the provider. Please consider to switch to SQLExecuteQueryOperator as soon as possible.
.. note::
Previously, ``MsSqlOperator`` was used to perform this kind of operation. Please use ``SQLExecuteQueryOperator`` instead.
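
As a rough sketch of the replacement (the connection id ``mssql_default`` and the SQL are illustrative, not taken from this commit):

.. code-block:: python

    from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator

    create_table = SQLExecuteQueryOperator(
        task_id="create_country_table",
        conn_id="mssql_default",  # assumed MSSQL connection id
        sql="""
            CREATE TABLE country (
                country_id INT,
                name NVARCHAR(50)
            );
        """,
    )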

Common Database Operations with SQLExecuteQueryOperator
-------------------------------------------------------
14 changes: 0 additions & 14 deletions docs/apache-airflow-providers-oracle/operators/index.rst
@@ -22,20 +22,6 @@ Oracle Operators
================
The Oracle connection type provides connection to a Oracle database.

Execute SQL in an Oracle database
---------------------------------

To execute arbitrary SQL in an Oracle database, use the
:class:`~airflow.providers.oracle.operators.oracle.OracleOperator`.

An example of executing a simple query is as follows:

.. exampleinclude:: /../../providers/src/airflow/providers/oracle/example_dags/example_oracle.py
:language: python
:start-after: [START howto_oracle_operator]
:end-before: [END howto_oracle_operator]


Execute a Stored Procedure in an Oracle database
------------------------------------------------
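
The body of this section is collapsed in this view; assuming it documents :class:`~airflow.providers.oracle.operators.oracle.OracleStoredProcedureOperator`, a minimal sketch (procedure name and parameters are illustrative):

.. code-block:: python

    from airflow.providers.oracle.operators.oracle import OracleStoredProcedureOperator

    call_procedure = OracleStoredProcedureOperator(
        task_id="call_stored_procedure",
        oracle_conn_id="oracle_default",  # assumed connection id
        procedure="TEST_PROCEDURE",  # illustrative procedure name
        parameters={"val_in": 3, "val_out": int},
    )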

4 changes: 2 additions & 2 deletions docs/apache-airflow-providers-sqlite/operators.rst
@@ -25,8 +25,8 @@ SQLExecuteQueryOperator to connect to Sqlite
Use the :class:`SQLExecuteQueryOperator<airflow.providers.common.sql.operators.sql>` to execute
Sqlite commands in a `Sqlite <https://sqlite.org/lang.html>`__ database.

.. warning::
Previously, SqliteOperator was used to perform this kind of operation. But at the moment SqliteOperator is deprecated and will be removed in future versions of the provider. Please consider to switch to SQLExecuteQueryOperator as soon as possible.
.. note::
Previously, ``SqliteOperator`` was used to perform this kind of operation. After deprecation this has been removed. Please use ``SQLExecuteQueryOperator`` instead.
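
A brief illustrative sketch (connection id, table and values are assumptions, not from this commit), using SQLite's named parameter style:

.. code-block:: python

    from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator

    insert_pet = SQLExecuteQueryOperator(
        task_id="insert_pet",
        conn_id="sqlite_default",  # assumed SQLite connection id
        sql="INSERT INTO pet (name, pet_type) VALUES (:name, :pet_type);",
        parameters={"name": "Max", "pet_type": "Dog"},
    )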

Using the Operator
^^^^^^^^^^^^^^^^^^
@@ -84,10 +84,6 @@ The grid view also provides visibility into your mapped tasks in the details pan

Although we show a "reduce" task here (``sum_it``) you don't have to have one, the mapped tasks will still be executed even if they have no downstream tasks.

.. warning:: ``TriggerRule.ALWAYS`` cannot be utilized in expanded tasks

Assigning ``trigger_rule=TriggerRule.ALWAYS`` in expanded tasks is forbidden, as expanded parameters will be undefined with the task's immediate execution.
This is enforced at the time of the DAG parsing, and will raise an error if you try to use it.

Task-generated Mapping
----------------------
@@ -113,6 +109,12 @@ The above examples we've shown could all be achieved with a ``for`` loop in the
The ``make_list`` task runs as a normal task and must return a list or dict (see `What data types can be expanded?`_), and then the ``consumer`` task will be called four times, once with each value in the return of ``make_list``.

.. warning:: Task-generated mapping cannot be utilized with ``TriggerRule.ALWAYS``

Assigning ``trigger_rule=TriggerRule.ALWAYS`` in task-generated mapping is not allowed, as expanded parameters are undefined with the task's immediate execution.
This is enforced at the time of DAG parsing, for both tasks and mapped task groups, and will raise an error if you try to use it.
In the example above, setting ``trigger_rule=TriggerRule.ALWAYS`` in the ``consumer`` task will raise an error since ``make_list`` is a task-generated mapping.
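
A short sketch of the rule (dag id and values are illustrative; ``make_list`` and ``consumer`` mirror the example referenced above):

.. code-block:: python

    import datetime

    from airflow import DAG
    from airflow.decorators import task
    from airflow.utils.trigger_rule import TriggerRule

    with DAG(dag_id="always_with_mapping_example", start_date=datetime.datetime(2024, 1, 1)):

        @task
        def make_list():
            return [1, 2, 3]

        @task(trigger_rule=TriggerRule.ALWAYS)
        def consumer(arg):
            print(arg)

        consumer.expand(arg=[1, 2, 3])  # allowed: the mapping source is a literal list

        # The following would raise ValueError during DAG parsing, because the
        # mapping source is generated by another task (an XComArg):
        #   consumer.expand(arg=make_list())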

Repeated mapping
----------------

File renamed without changes.
1 change: 1 addition & 0 deletions newsfragments/44751.bugfix.rst
@@ -0,0 +1 @@
``TriggerRule.ALWAYS`` cannot be utilized within a task-generated mapping, either in bare tasks (fixed in this PR) or mapped task groups (fixed in PR #44368). The issue with doing so is that the task is immediately executed without waiting for the upstream's mapping results, which inevitably leads to failure of the task. This fix avoids that by raising an exception when such usage is detected during DAG parsing.
11 changes: 11 additions & 0 deletions providers/src/airflow/providers/apache/druid/CHANGELOG.rst
@@ -27,6 +27,17 @@
Changelog
---------

main
.....

.. warning::
All deprecated classes, parameters and features have been removed from the Apache Druid provider package.
The following breaking changes were introduced:

* Operators

* Removed ``DruidCheckOperator``. Please use ``airflow.providers.common.sql.operators.sql.SQLCheckOperator`` instead.

3.12.1
......

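
A hedged migration sketch for the removal above: ``SQLCheckOperator`` pointed at a Druid SQL connection in place of the removed ``DruidCheckOperator``. The connection id and query are illustrative.

.. code-block:: python

    from airflow.providers.common.sql.operators.sql import SQLCheckOperator

    check_segments = SQLCheckOperator(
        task_id="check_datasource_not_empty",
        conn_id="druid_broker_default",  # assumed Druid SQL connection id
        sql="SELECT COUNT(*) FROM my_datasource",  # the task fails if the first cell is falsy
    )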

This file was deleted.

1 change: 0 additions & 1 deletion providers/src/airflow/providers/apache/druid/provider.yaml
@@ -76,7 +76,6 @@ operators:
- integration-name: Apache Druid
python-modules:
- airflow.providers.apache.druid.operators.druid
- airflow.providers.apache.druid.operators.druid_check

hooks:
- integration-name: Apache Druid
8 changes: 8 additions & 0 deletions providers/src/airflow/providers/apache/hive/CHANGELOG.rst
@@ -26,6 +26,14 @@

Changelog
---------
main
....

.. warning::
All deprecated classes, parameters and features have been removed from the Apache Hive provider package.
The following breaking changes were introduced:

* Removed deprecated ``GSSAPI`` for ``auth_mechanism``. Use ``KERBEROS`` instead.

8.2.1
.....
8 changes: 0 additions & 8 deletions providers/src/airflow/providers/apache/hive/hooks/hive.py
@@ -873,14 +873,6 @@ def get_conn(self, schema: str | None = None) -> Any:
auth_mechanism = db.extra_dejson.get("auth_mechanism", "KERBEROS")
kerberos_service_name = db.extra_dejson.get("kerberos_service_name", "hive")

# pyhive uses GSSAPI instead of KERBEROS as a auth_mechanism identifier
if auth_mechanism == "GSSAPI":
self.log.warning(
"Detected deprecated 'GSSAPI' for auth_mechanism for %s. Please use 'KERBEROS' instead",
self.hiveserver2_conn_id, # type: ignore
)
auth_mechanism = "KERBEROS"

# Password should be set if and only if in LDAP or CUSTOM mode
if auth_mechanism in ("LDAP", "CUSTOM"):
password = db.password
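
Since the hook no longer rewrites the deprecated value, connections must carry the supported one explicitly. A hedged sketch of what such a connection could look like (host and connection id are illustrative):

.. code-block:: python

    from airflow.models.connection import Connection

    hive_conn = Connection(
        conn_id="hiveserver2_default",
        conn_type="hiveserver2",
        host="hive.example.com",  # illustrative host
        # "GSSAPI" is no longer rewritten to "KERBEROS"; set the supported value directly.
        extra='{"auth_mechanism": "KERBEROS", "kerberos_service_name": "hive"}',
    )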
14 changes: 14 additions & 0 deletions providers/src/airflow/providers/apprise/CHANGELOG.rst
@@ -27,6 +27,20 @@
Changelog
---------

main
....

.. warning::
All deprecated classes, parameters and features have been removed from the Apprise provider package.
The following breaking changes were introduced:

* Hooks
* Parameter ``tag`` cannot be None. It is now set to ``MATCH_ALL_TAG`` by default.
* Notifications
* Parameter ``notify_type`` cannot be None. It is now set to ``NotifyType.INFO`` by default.
* Parameter ``body_format`` cannot be None. It is now set to ``NotifyFormat.TEXT`` by default.
* Parameter ``tag`` cannot be None. It is now set to ``MATCH_ALL_TAG`` by default.

1.4.1
.....

12 changes: 1 addition & 11 deletions providers/src/airflow/providers/apprise/hooks/apprise.py
@@ -18,14 +18,12 @@
from __future__ import annotations

import json
import warnings
from collections.abc import Iterable
from typing import TYPE_CHECKING, Any

import apprise
from apprise import AppriseConfig, NotifyFormat, NotifyType

from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.hooks.base import BaseHook

if TYPE_CHECKING:
@@ -77,7 +75,7 @@ def notify(
title: str | None = None,
notify_type: NotifyType = NotifyType.INFO,
body_format: NotifyFormat = NotifyFormat.TEXT,
tag: str | Iterable[str] | None = None,
tag: str | Iterable[str] = "all",
attach: AppriseAttachment | None = None,
interpret_escapes: bool | None = None,
config: AppriseConfig | None = None,
@@ -97,14 +95,6 @@
sequences such as \n and \r to their respective ascii new-line and carriage return characters
:param config: Specify one or more configuration
"""
if tag is None:
warnings.warn(
"`tag` cannot be None. Assign it to be MATCH_ALL_TAG",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
tag = "all"

title = title or ""

apprise_obj = apprise.Apprise()
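
With the None-handling removed, callers that previously passed ``tag=None``, ``notify_type=None`` or ``body_format=None`` should simply omit those arguments and rely on the new defaults. A minimal hedged sketch (connection id and message are illustrative):

.. code-block:: python

    from airflow.providers.apprise.hooks.apprise import AppriseHook

    hook = AppriseHook(apprise_conn_id="apprise_default")  # assumed connection id
    hook.notify(
        body="Task finished successfully",  # illustrative message
        title="Airflow notification",
        # tag, notify_type and body_format now default to "all", NotifyType.INFO
        # and NotifyFormat.TEXT; passing None is no longer accepted.
    )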