From 00b8452b7526166b2918976b991aff1124d1a17c Mon Sep 17 00:00:00 2001 From: Gopal Dirisala <39794726+dirrao@users.noreply.github.com> Date: Mon, 7 Oct 2024 10:38:52 +0530 Subject: [PATCH 001/125] Removed unicodecsv dependency for providers with Airflow version 2.8.0 and above (#42765) --- 3rd-party-licenses/LICENSE-unicodecsv.txt | 25 ----------------------- hatch_build.py | 5 ----- 2 files changed, 30 deletions(-) delete mode 100644 3rd-party-licenses/LICENSE-unicodecsv.txt diff --git a/3rd-party-licenses/LICENSE-unicodecsv.txt b/3rd-party-licenses/LICENSE-unicodecsv.txt deleted file mode 100644 index 6d004c776de0a..0000000000000 --- a/3rd-party-licenses/LICENSE-unicodecsv.txt +++ /dev/null @@ -1,25 +0,0 @@ -Copyright 2010 Jeremy Dunck. All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, are -permitted provided that the following conditions are met: - - 1. Redistributions of source code must retain the above copyright notice, this list of - conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright notice, this list - of conditions and the following disclaimer in the documentation and/or other materials - provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY JEREMY DUNCK ``AS IS'' AND ANY EXPRESS OR IMPLIED -WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JEREMY DUNCK OR -CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON -ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF -ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -The views and conclusions contained in the software and documentation are those of the -authors and should not be interpreted as representing official policies, either expressed -or implied, of Jeremy Dunck. 
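The unicodecsv package was only ever needed on Python 2, where the standard csv module could not handle unicode; on Python 3 the stdlib csv module does, which is why Airflow 2.7 could replace unicodecsv with csv (see the comment removed from hatch_build.py just below). A minimal sketch of the replacement pattern, with an illustrative file name and row values rather than anything taken from this patch:

    import csv

    # On Python 3, open files in text mode with an explicit encoding;
    # csv.writer/csv.reader then handle unicode values natively.
    with open("example.csv", "w", encoding="utf-8", newline="") as f:
        csv.writer(f).writerow(["dag_id", "tâche"])

    with open("example.csv", encoding="utf-8", newline="") as f:
        rows = list(csv.reader(f))  # [["dag_id", "tâche"]]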
diff --git a/hatch_build.py b/hatch_build.py index 5309e9d1b08cb..68c88b502aef3 100644 --- a/hatch_build.py +++ b/hatch_build.py @@ -434,11 +434,6 @@ "tabulate>=0.7.5", "tenacity>=8.0.0,!=8.2.0", "termcolor>=1.1.0", - # We should remove this dependency when Providers are limited to Airflow 2.7+ - # as we replaced the usage of unicodecsv with csv in Airflow 2.7 - # See https://github.com/apache/airflow/pull/31693 - # We should also remove "3rd-party-licenses/LICENSE-unicodecsv.txt" file when we remove this dependency - "unicodecsv>=0.14.1", # Universal Pathlib 0.2.4 adds extra validation for Paths and our integration with local file paths # Does not work with it Tracked in https://github.com/fsspec/universal_pathlib/issues/276 "universal-pathlib>=0.2.2,!=0.2.4", From 46ccc73381f6be3acfb8781d14fb6f7915a5cb68 Mon Sep 17 00:00:00 2001 From: Gopal Dirisala <39794726+dirrao@users.noreply.github.com> Date: Mon, 7 Oct 2024 10:40:02 +0530 Subject: [PATCH 002/125] Removed deprecated k8s rendering methods from task instance module (#42641) * Removed deprecated k8s rendering methods from task instance module * task k8s rendering unit tests update --- airflow/models/taskinstance.py | 47 ------------------- .../kubernetes/test_template_rendering.py | 7 +-- 2 files changed, 4 insertions(+), 50 deletions(-) diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index 333a4cad91cbe..31f7b6535a9c2 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -3405,53 +3405,6 @@ def render_templates( return original_task - def render_k8s_pod_yaml(self) -> dict | None: - """Render the k8s pod yaml.""" - try: - from airflow.providers.cncf.kubernetes.template_rendering import ( - render_k8s_pod_yaml as render_k8s_pod_yaml_from_provider, - ) - except ImportError: - raise RuntimeError( - "You need to have the `cncf.kubernetes` provider installed to use this feature. " - "Also rather than calling it directly you should import " - "render_k8s_pod_yaml from airflow.providers.cncf.kubernetes.template_rendering " - "and call it with TaskInstance as the first argument." - ) - warnings.warn( - "You should not call `task_instance.render_k8s_pod_yaml` directly. This method will be removed" - "in Airflow 3. Rather than calling it directly you should import " - "`render_k8s_pod_yaml` from `airflow.providers.cncf.kubernetes.template_rendering` " - "and call it with `TaskInstance` as the first argument.", - DeprecationWarning, - stacklevel=2, - ) - return render_k8s_pod_yaml_from_provider(self) - - @provide_session - def get_rendered_k8s_spec(self, session: Session = NEW_SESSION): - """Render the k8s pod yaml.""" - try: - from airflow.providers.cncf.kubernetes.template_rendering import ( - get_rendered_k8s_spec as get_rendered_k8s_spec_from_provider, - ) - except ImportError: - raise RuntimeError( - "You need to have the `cncf.kubernetes` provider installed to use this feature. " - "Also rather than calling it directly you should import " - "`get_rendered_k8s_spec` from `airflow.providers.cncf.kubernetes.template_rendering` " - "and call it with `TaskInstance` as the first argument." - ) - warnings.warn( - "You should not call `task_instance.render_k8s_pod_yaml` directly. This method will be removed" - "in Airflow 3. 
Rather than calling it directly you should import "
-                "`get_rendered_k8s_spec` from `airflow.providers.cncf.kubernetes.template_rendering` "
-                "and call it with `TaskInstance` as the first argument.",
-                DeprecationWarning,
-                stacklevel=2,
-            )
-        return get_rendered_k8s_spec_from_provider(self, session=session)
-
     def get_email_subject_content(
         self, exception: BaseException, task: BaseOperator | None = None
     ) -> tuple[str, str, str]:
diff --git a/tests/providers/cncf/kubernetes/test_template_rendering.py b/tests/providers/cncf/kubernetes/test_template_rendering.py
index fb5cad744826e..ab2820284d553 100644
--- a/tests/providers/cncf/kubernetes/test_template_rendering.py
+++ b/tests/providers/cncf/kubernetes/test_template_rendering.py
@@ -25,6 +25,7 @@
 from airflow.configuration import TEST_DAGS_FOLDER
 from airflow.models.renderedtifields import RenderedTaskInstanceFields, RenderedTaskInstanceFields as RTIF
 from airflow.operators.bash import BashOperator
+from airflow.providers.cncf.kubernetes.template_rendering import get_rendered_k8s_spec, render_k8s_pod_yaml
 from airflow.utils.session import create_session
 from airflow.version import version
 from tests.models import DEFAULT_DATE
@@ -82,7 +83,7 @@ def test_render_k8s_pod_yaml(pod_mutation_hook, create_task_instance):
         },
     }
 
-    assert ti.render_k8s_pod_yaml() == expected_pod_spec
+    assert render_k8s_pod_yaml(ti) == expected_pod_spec
     pod_mutation_hook.assert_called_once_with(mock.ANY)
 
 
@@ -100,7 +101,7 @@ def test_get_rendered_k8s_spec(render_k8s_pod_yaml, rtif_get_k8s_pod_yaml, creat
     session = mock.Mock()
     rtif_get_k8s_pod_yaml.return_value = fake_spec
 
-    assert ti.get_rendered_k8s_spec(session) == fake_spec
+    assert get_rendered_k8s_spec(ti, session=session) == fake_spec
 
     rtif_get_k8s_pod_yaml.assert_called_once_with(ti, session=session)
     render_k8s_pod_yaml.assert_not_called()
@@ -109,7 +110,7 @@ def test_get_rendered_k8s_spec(render_k8s_pod_yaml, rtif_get_k8s_pod_yaml, creat
     rtif_get_k8s_pod_yaml.return_value = None
    render_k8s_pod_yaml.return_value = fake_spec
 
-    assert ti.get_rendered_k8s_spec(session) == fake_spec
+    assert get_rendered_k8s_spec(ti, session=session) == fake_spec
 
     render_k8s_pod_yaml.assert_called_once()
 

From e46006b25b025eee2feb5aedcb3bb01069a4b730 Mon Sep 17 00:00:00 2001
From: Jarek Potiuk
Date: Sun, 6 Oct 2024 23:24:24 -0700
Subject: [PATCH 003/125] Add standard provider to chicken-egg-providers (#42760)

The standard provider has not yet been officially released, so it needs
to be added to chicken-egg-providers. This can only be done via an
"apache" PR, as it is needed in the build-images workflow.
This is needed to unblock #42252
---
 dev/breeze/src/airflow_breeze/global_constants.py | 2 +-
 prod_image_installed_providers.txt                | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py
index 96e37c28fa868..7d1da86314005 100644
--- a/dev/breeze/src/airflow_breeze/global_constants.py
+++ b/dev/breeze/src/airflow_breeze/global_constants.py
@@ -502,7 +502,7 @@ def get_airflow_extras():
     # END OF EXTRAS LIST UPDATED BY PRE COMMIT
 ]
 
-CHICKEN_EGG_PROVIDERS = " ".join([])
+CHICKEN_EGG_PROVIDERS = " ".join(["standard"])
 
 
 BASE_PROVIDERS_COMPATIBILITY_CHECKS: list[dict[str, str | list[str]]] = [
diff --git a/prod_image_installed_providers.txt b/prod_image_installed_providers.txt
index 7340928738c11..5ba30a865f4f3 100644
--- a/prod_image_installed_providers.txt
+++ b/prod_image_installed_providers.txt
@@ -27,3 +27,4 @@ smtp
 snowflake
 sqlite
 ssh
+standard

From ad06949068a15d06391b2d058140e6061de22eeb Mon Sep 17 00:00:00 2001
From: Brent Bovenzi
Date: Mon, 7 Oct 2024 16:30:59 +0200
Subject: [PATCH 004/125] Remove burdensome eslint rules (#42795)

---
 airflow/ui/.prettierignore                    |  1 +
 airflow/ui/eslint.config.js                   |  2 +-
 airflow/ui/rules/core.js                      | 18 ------------------
 airflow/ui/rules/react.js                     |  7 -------
 airflow/ui/src/App.test.tsx                   |  3 +++
 .../ui/src/components/DataTable/DataTable.tsx |  1 -
 airflow/ui/src/pages/DagsList/DagsList.tsx    |  1 -
 7 files changed, 5 insertions(+), 28 deletions(-)

diff --git a/airflow/ui/.prettierignore b/airflow/ui/.prettierignore
index a2bcd8157d69c..7e860ea047193 100644
--- a/airflow/ui/.prettierignore
+++ b/airflow/ui/.prettierignore
@@ -3,3 +3,4 @@ templates/**/*.html
 dist/
 *.md
 *.yaml
+coverage/*
diff --git a/airflow/ui/eslint.config.js b/airflow/ui/eslint.config.js
index fcce70dff619a..31467415319b3 100644
--- a/airflow/ui/eslint.config.js
+++ b/airflow/ui/eslint.config.js
@@ -34,7 +34,7 @@ import { unicornRules } from "./rules/unicorn.js";
  */
 export default /** @type {const} @satisfies {ReadonlyArray} */ ([
   // Global ignore of dist directory
-  { ignores: ["**/dist/"] },
+  { ignores: ["**/dist/", "**/coverage/"] },
   // Base rules
   coreRules,
   typescriptRules,
diff --git a/airflow/ui/rules/core.js b/airflow/ui/rules/core.js
index 3ec26cad6d32c..54011b1ce7da6 100644
--- a/airflow/ui/rules/core.js
+++ b/airflow/ui/rules/core.js
@@ -96,13 +96,6 @@ export const coreRules = /** @type {const} @satisfies {FlatConfig.Config} */ ({
    */
   "arrow-body-style": ERROR,
 
-  /**
-   * Limit cyclomatic complexity to a maximum of 10.
-   *
-   * @see [complexity](https://eslint.org/docs/latest/rules/complexity)
-   */
-  complexity: [WARN, 10],
-
   /**
    * Require curly around all control statements.
    *
@@ -291,17 +284,6 @@ export const coreRules = /** @type {const} @satisfies {FlatConfig.Config} */ ({
     { max: 250, skipBlankLines: true, skipComments: true },
   ],
 
-  /**
-   * Enforce a maximum number of 100 lines of code in a function.
-   * Need more? Move it to another function.
-   *
-   * @see [max-lines-per-function](https://eslint.org/docs/latest/rules/max-lines-per-function)
-   */
-  "max-lines-per-function": [
-    ERROR,
-    { max: 100, skipBlankLines: true, skipComments: true },
-  ],
-
   /**
    * Enforce a maximum depth that callbacks can be nested to 3.
* diff --git a/airflow/ui/rules/react.js b/airflow/ui/rules/react.js index 4c8d8b8ba5f09..508718659b4ad 100644 --- a/airflow/ui/rules/react.js +++ b/airflow/ui/rules/react.js @@ -478,13 +478,6 @@ export const reactRules = /** @type {const} @satisfies {FlatConfig.Config} */ ({ */ [`${reactNamespace}/jsx-max-depth`]: [ERROR, { max: 5 }], - /** - * Disallow `Function#bind` or arrow functions in JSX props. - * - * @see [react/jsx-no-bind](https://github.com/jsx-eslint/eslint-plugin-react/blob/HEAD/docs/rules/jsx-no-bind.md) - */ - [`${reactNamespace}/jsx-no-bind`]: ERROR, - /** * Disallow comments from being inserted as text nodes. * diff --git a/airflow/ui/src/App.test.tsx b/airflow/ui/src/App.test.tsx index 5efcf90f1a05d..38b90d1c4983c 100644 --- a/airflow/ui/src/App.test.tsx +++ b/airflow/ui/src/App.test.tsx @@ -26,6 +26,9 @@ import type { DAGCollectionResponse } from "openapi/requests/types.gen"; import { App } from "./App"; import { Wrapper } from "./utils/Wrapper"; +// The null fields actually have to be null instead of undefined +/* eslint-disable unicorn/no-null */ + const mockListDags: DAGCollectionResponse = { dags: [ { diff --git a/airflow/ui/src/components/DataTable/DataTable.tsx b/airflow/ui/src/components/DataTable/DataTable.tsx index 705d7883f07d2..48b934660283f 100644 --- a/airflow/ui/src/components/DataTable/DataTable.tsx +++ b/airflow/ui/src/components/DataTable/DataTable.tsx @@ -63,7 +63,6 @@ type DataTableProps = { const defaultGetRowCanExpand = () => false; -// eslint-disable-next-line max-lines-per-function export const DataTable = ({ columns, data, diff --git a/airflow/ui/src/pages/DagsList/DagsList.tsx b/airflow/ui/src/pages/DagsList/DagsList.tsx index d58e3eaa2038c..90981a0747643 100644 --- a/airflow/ui/src/pages/DagsList/DagsList.tsx +++ b/airflow/ui/src/pages/DagsList/DagsList.tsx @@ -85,7 +85,6 @@ const columns: Array> = [ const PAUSED_PARAM = "paused"; -// eslint-disable-next-line complexity export const DagsList = ({ cardView = false }) => { const [searchParams] = useSearchParams(); From e5a474bdd173263b628d9a8a2efa3860cb88a1c8 Mon Sep 17 00:00:00 2001 From: Gopal Dirisala <39794726+dirrao@users.noreply.github.com> Date: Mon, 7 Oct 2024 20:52:41 +0530 Subject: [PATCH 005/125] kubernetes executor cleanup_stuck_queued_tasks optimization (#41220) * kubernetes executor cleanup_stuck_queued_tasks optimization * kubernetes executor cleanup_stuck_queued_tasks optimization * kubernetes executor cleanup_stuck_queued_tasks optimization * kubernetes executor cleanup_stuck_queued_tasks optimization * Updated comment * Provider change log and version updated * Update the worker pod and task comparison from labels to annotations --- .../providers/cncf/kubernetes/CHANGELOG.rst | 6 + .../executors/kubernetes_executor.py | 111 ++++++++---------- .../providers/cncf/kubernetes/provider.yaml | 1 + .../executors/test_kubernetes_executor.py | 67 ++++++++--- 4 files changed, 107 insertions(+), 78 deletions(-) diff --git a/airflow/providers/cncf/kubernetes/CHANGELOG.rst b/airflow/providers/cncf/kubernetes/CHANGELOG.rst index 1614786802c8a..3a8101943eaf3 100644 --- a/airflow/providers/cncf/kubernetes/CHANGELOG.rst +++ b/airflow/providers/cncf/kubernetes/CHANGELOG.rst @@ -27,6 +27,12 @@ Changelog --------- +main +..... + +.. warning:: + Support for identifying pods by execution_date during the upgrade from Airflow 1 to 2 has been removed. This may result in duplicate pods being launched for tasks originally started by Airflow 1, but only one of the task pods will succeed. + 8.4.2 ..... 
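The kubernetes_executor.py diff that follows replaces per-task pod listing calls with a single batched list request; each returned pod is then matched to its task instance through a dict keyed on a search string built from the pod's annotations. A standalone sketch of that matching pattern, where the dict-shaped pods and the helper below are simplified stand-ins rather than the provider's actual client objects:

    # Build the lookup once from one batched pod-list call, then resolve each
    # queued task with an O(1) dict probe instead of a per-task API request.
    def build_search_map(pods: list[dict]) -> dict[str, dict]:
        search_map = {}
        for pod in pods:
            ann = pod["annotations"]
            if "dag_id" not in ann or "task_id" not in ann or "run_id" not in ann:
                continue
            key = f"dag_id={ann['dag_id']},task_id={ann['task_id']}"
            if "map_index" in ann:
                key += f",map_index={ann['map_index']}"
            key += f",run_id={ann['run_id']}"
            search_map[key] = pod
        return search_map

    pods = [{"annotations": {"dag_id": "d", "task_id": "t", "run_id": "r"}}]
    lookup = build_search_map(pods)
    assert lookup.get("dag_id=d,task_id=t,run_id=r") is not None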
diff --git a/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py b/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
index 0b2de8085c5dd..7c6e0d8852e5f 100644
--- a/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
+++ b/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
@@ -179,6 +179,36 @@ def _make_safe_label_value(self, input_value: str | datetime) -> str:
             return pod_generator.datetime_to_label_safe_datestring(input_value)
         return pod_generator.make_safe_label_value(input_value)
 
+    def get_pod_combined_search_str_to_pod_map(self) -> dict[str, k8s.V1Pod]:
+        """
+        List the worker pods owned by this scheduler and create a map containing pod combined search str -> pod.
+
+        For each pod it creates one entry in the map, keyed by the combined search string
+        dag_id={dag_id},task_id={task_id}[,map_index={map_index}],run_id={run_id}
+        """
+        # airflow worker label selector batch call
+        kwargs = {"label_selector": f"airflow-worker={self._make_safe_label_value(str(self.job_id))}"}
+        if self.kube_config.kube_client_request_args:
+            kwargs.update(self.kube_config.kube_client_request_args)
+        pod_list = self._list_pods(kwargs)
+
+        # build a map keyed by each pod's annotation-derived combined search string
+        pod_combined_search_str_to_pod_map = {}
+        for pod in pod_list:
+            dag_id = pod.metadata.annotations.get("dag_id", None)
+            task_id = pod.metadata.annotations.get("task_id", None)
+            map_index = pod.metadata.annotations.get("map_index", None)
+            run_id = pod.metadata.annotations.get("run_id", None)
+            if dag_id is None or task_id is None:
+                continue
+            search_base_str = f"dag_id={dag_id},task_id={task_id}"
+            if map_index is not None:
+                search_base_str += f",map_index={map_index}"
+            if run_id is not None:
+                search_str = f"{search_base_str},run_id={run_id}"
+                pod_combined_search_str_to_pod_map[search_str] = pod
+        return pod_combined_search_str_to_pod_map
+
     @provide_session
     def clear_not_launched_queued_tasks(self, session: Session = NEW_SESSION) -> None:
         """
@@ -218,32 +248,7 @@ def clear_not_launched_queued_tasks(self, session: Session = NEW_SESSION) -> Non
         if not queued_tis:
             return
 
-        # airflow worker label selector batch call
-        kwargs = {"label_selector": f"airflow-worker={self._make_safe_label_value(str(self.job_id))}"}
-        if self.kube_config.kube_client_request_args:
-            kwargs.update(self.kube_config.kube_client_request_args)
-        pod_list = self._list_pods(kwargs)
-
-        # create a set against pod query label fields
-        label_search_set = set()
-        for pod in pod_list:
-            dag_id = pod.metadata.labels.get("dag_id", None)
-            task_id = pod.metadata.labels.get("task_id", None)
-            airflow_worker = pod.metadata.labels.get("airflow-worker", None)
-            map_index = pod.metadata.labels.get("map_index", None)
-            run_id = pod.metadata.labels.get("run_id", None)
-            execution_date = pod.metadata.labels.get("execution_date", None)
-            if dag_id is None or task_id is None or airflow_worker is None:
-                continue
-            label_search_base_str = f"dag_id={dag_id},task_id={task_id},airflow-worker={airflow_worker}"
-            if map_index is not None:
-                label_search_base_str += f",map_index={map_index}"
-            if run_id is not None:
-                label_search_str = f"{label_search_base_str},run_id={run_id}"
-                label_search_set.add(label_search_str)
-            if execution_date is not None:
-                label_search_str = f"{label_search_base_str},execution_date={execution_date}"
-                label_search_set.add(label_search_str)
+        pod_combined_search_str_to_pod_map = self.get_pod_combined_search_str_to_pod_map()
 
         for ti in queued_tis:
             self.log.debug("Checking task instance %s", ti)
@@ -253,24 
+258,13 @@ def clear_not_launched_queued_tasks(self, session: Session = NEW_SESSION) -> Non continue # Build the pod selector - base_label_selector = ( - f"dag_id={self._make_safe_label_value(ti.dag_id)}," - f"task_id={self._make_safe_label_value(ti.task_id)}," - f"airflow-worker={self._make_safe_label_value(str(ti.queued_by_job_id))}" - ) + base_selector = f"dag_id={ti.dag_id},task_id={ti.task_id}" if ti.map_index >= 0: # Old tasks _couldn't_ be mapped, so we don't have to worry about compat - base_label_selector += f",map_index={ti.map_index}" + base_selector += f",map_index={ti.map_index}" - # Try run_id first - label_search_str = f"{base_label_selector},run_id={self._make_safe_label_value(ti.run_id)}" - if label_search_str in label_search_set: - continue - # Fallback to old style of using execution_date - label_search_str = ( - f"{base_label_selector},execution_date={self._make_safe_label_value(ti.execution_date)}" - ) - if label_search_str in label_search_set: + search_str = f"{base_selector},run_id={ti.run_id}" + if search_str in pod_combined_search_str_to_pod_map: continue self.log.info("TaskInstance: %s found in queued state but was not launched, rescheduling", ti) session.execute( @@ -603,34 +597,27 @@ def cleanup_stuck_queued_tasks(self, tis: list[TaskInstance]) -> list[str]: :param tis: List of Task Instances to clean up :return: List of readable task instances for a warning message """ - from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator - if TYPE_CHECKING: assert self.kube_client assert self.kube_scheduler - readable_tis = [] + readable_tis: list[str] = [] + if not tis: + return readable_tis + pod_combined_search_str_to_pod_map = self.get_pod_combined_search_str_to_pod_map() for ti in tis: - selector = PodGenerator.build_selector_for_k8s_executor_pod( - dag_id=ti.dag_id, - task_id=ti.task_id, - try_number=ti.try_number, - map_index=ti.map_index, - run_id=ti.run_id, - airflow_worker=ti.queued_by_job_id, - ) - namespace = self._get_pod_namespace(ti) - pod_list = self.kube_client.list_namespaced_pod( - namespace=namespace, - label_selector=selector, - ).items - if not pod_list: + # Build the pod selector + base_label_selector = f"dag_id={ti.dag_id},task_id={ti.task_id}" + if ti.map_index >= 0: + # Old tasks _couldn't_ be mapped, so we don't have to worry about compat + base_label_selector += f",map_index={ti.map_index}" + + search_str = f"{base_label_selector},run_id={ti.run_id}" + pod = pod_combined_search_str_to_pod_map.get(search_str, None) + if not pod: self.log.warning("Cannot find pod for ti %s", ti) continue - elif len(pod_list) > 1: - self.log.warning("Found multiple pods for ti %s: %s", ti, pod_list) - continue readable_tis.append(repr(ti)) - self.kube_scheduler.delete_pod(pod_name=pod_list[0].metadata.name, namespace=namespace) + self.kube_scheduler.delete_pod(pod_name=pod.metadata.name, namespace=pod.metadata.namespace) return readable_tis def adopt_launched_task( diff --git a/airflow/providers/cncf/kubernetes/provider.yaml b/airflow/providers/cncf/kubernetes/provider.yaml index 570cef67f8237..0849609b8ff80 100644 --- a/airflow/providers/cncf/kubernetes/provider.yaml +++ b/airflow/providers/cncf/kubernetes/provider.yaml @@ -25,6 +25,7 @@ state: ready source-date-epoch: 1726860352 # note that those versions are maintained by release manager - do not update them manually versions: + - 9.0.0 - 8.4.2 - 8.4.1 - 8.4.0 diff --git a/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py 
b/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py index 8cc46c3dba76b..4622d31b575fc 100644 --- a/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py +++ b/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py @@ -19,7 +19,7 @@ import random import re import string -from datetime import datetime, timedelta +from datetime import datetime from unittest import mock import pytest @@ -1191,28 +1191,52 @@ def test_not_adopt_unassigned_task(self, mock_kube_client): assert tis_to_flush_by_key == {"foobar": {}} @pytest.mark.db_test - @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client") - @mock.patch( - "airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.AirflowKubernetesScheduler.delete_pod" - ) - def test_cleanup_stuck_queued_tasks(self, mock_delete_pod, mock_kube_client, dag_maker, session): + @mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor.DynamicClient") + def test_cleanup_stuck_queued_tasks(self, mock_kube_dynamic_client, dag_maker, create_dummy_dag, session): """Delete any pods associated with a task stuck in queued.""" - executor = KubernetesExecutor() - executor.start() - executor.scheduler_job_id = "123" - with dag_maker(dag_id="test_cleanup_stuck_queued_tasks"): - op = BashOperator(task_id="bash", bash_command=["echo 0", "echo 1"]) + mock_kube_client = mock.MagicMock() + mock_kube_dynamic_client.return_value = mock.MagicMock() + mock_pod_resource = mock.MagicMock() + mock_kube_dynamic_client.return_value.resources.get.return_value = mock_pod_resource + mock_kube_dynamic_client.return_value.get.return_value = k8s.V1PodList( + items=[ + k8s.V1Pod( + metadata=k8s.V1ObjectMeta( + annotations={ + "dag_id": "test_cleanup_stuck_queued_tasks", + "task_id": "bash", + "run_id": "test", + "try_number": 0, + }, + labels={ + "role": "airflow-worker", + "dag_id": "test_cleanup_stuck_queued_tasks", + "task_id": "bash", + "airflow-worker": 123, + "run_id": "test", + "try_number": 0, + }, + ), + status=k8s.V1PodStatus(phase="Pending"), + ) + ] + ) + create_dummy_dag(dag_id="test_cleanup_stuck_queued_tasks", task_id="bash", with_dagrun_type=None) dag_run = dag_maker.create_dagrun() - ti = dag_run.get_task_instance(op.task_id, session) - ti.retries = 1 + ti = dag_run.task_instances[0] ti.state = State.QUEUED - ti.queued_dttm = timezone.utcnow() - timedelta(minutes=30) + ti.queued_by_job_id = 123 + session.flush() + + executor = self.kubernetes_executor + executor.job_id = 123 + executor.kube_client = mock_kube_client + executor.kube_scheduler = mock.MagicMock() ti.refresh_from_db() tis = [ti] executor.cleanup_stuck_queued_tasks(tis) - mock_delete_pod.assert_called_once() + executor.kube_scheduler.delete_pod.assert_called_once() assert executor.running == set() - executor.end() @pytest.mark.parametrize( "raw_multi_namespace_mode, raw_value_namespace_list, expected_value_in_kube_config", @@ -1292,6 +1316,11 @@ def test_clear_not_launched_queued_tasks_launched( items=[ k8s.V1Pod( metadata=k8s.V1ObjectMeta( + annotations={ + "dag_id": "test_clear", + "task_id": "task1", + "run_id": "test", + }, labels={ "role": "airflow-worker", "dag_id": "test_clear", @@ -1339,6 +1368,12 @@ def get(*args, **kwargs): items=[ k8s.V1Pod( metadata=k8s.V1ObjectMeta( + annotations={ + "dag_id": "test_clear", + "task_id": "bash", + "run_id": "test", + "map_index": 0, + }, labels={ "role": "airflow-worker", "dag_id": "test_clear", From 9ba45b6d5f474f5c39c563f98dd87afa4245a115 Mon Sep 17 00:00:00 2001 From: 
David Blain Date: Mon, 7 Oct 2024 17:30:26 +0200 Subject: [PATCH 006/125] FIX: Only pass connection to sqlalchemy engine in JdbcHook (#42705) * refactor: Only pass connection as creator to create a sqlalchemy engine in JdbcHook, don't generalize it. * refactor: Make sure engine_kwargs is initialised * docs: Fixed type in docstring * Revert "docs: Fixed type in docstring" This reverts commit 05714bc366a3f765c2064dec7ed11e606b4df112. * refactor: Added unit test for get_sqlalchemy_engine in JdbcHook * refactor: Reformatted get_hook method in TestJdbcHook * refactor: Refactored get_hook method in TestJdbcHook * refactor: Subclassed JdbcHook to allow overriding the get_connection method and return a mocked connection --------- Co-authored-by: David Blain --- airflow/providers/common/sql/hooks/sql.py | 1 - airflow/providers/jdbc/hooks/jdbc.py | 13 ++++++++ tests/providers/jdbc/hooks/test_jdbc.py | 37 ++++++++++++++++++++--- 3 files changed, 45 insertions(+), 6 deletions(-) diff --git a/airflow/providers/common/sql/hooks/sql.py b/airflow/providers/common/sql/hooks/sql.py index dfa8c6fc727ed..7983808d0d579 100644 --- a/airflow/providers/common/sql/hooks/sql.py +++ b/airflow/providers/common/sql/hooks/sql.py @@ -275,7 +275,6 @@ def get_sqlalchemy_engine(self, engine_kwargs=None): """ if engine_kwargs is None: engine_kwargs = {} - engine_kwargs["creator"] = self.get_conn try: url = self.sqlalchemy_url diff --git a/airflow/providers/jdbc/hooks/jdbc.py b/airflow/providers/jdbc/hooks/jdbc.py index 27a438ae414cf..356bd5d450606 100644 --- a/airflow/providers/jdbc/hooks/jdbc.py +++ b/airflow/providers/jdbc/hooks/jdbc.py @@ -163,6 +163,19 @@ def sqlalchemy_url(self) -> URL: database=conn.schema, ) + def get_sqlalchemy_engine(self, engine_kwargs=None): + """ + Get an sqlalchemy_engine object. + + :param engine_kwargs: Kwargs used in :func:`~sqlalchemy.create_engine`. + :return: the created engine. 
+ """ + if engine_kwargs is None: + engine_kwargs = {} + engine_kwargs["creator"] = self.get_conn + + return super().get_sqlalchemy_engine(engine_kwargs) + def get_conn(self) -> jaydebeapi.Connection: conn: Connection = self.get_connection(self.get_conn_id()) host: str = conn.host diff --git a/tests/providers/jdbc/hooks/test_jdbc.py b/tests/providers/jdbc/hooks/test_jdbc.py index cb38ce40ae391..f26a9d7ffb5b3 100644 --- a/tests/providers/jdbc/hooks/test_jdbc.py +++ b/tests/providers/jdbc/hooks/test_jdbc.py @@ -19,6 +19,7 @@ import json import logging +import sqlite3 from unittest import mock from unittest.mock import Mock, patch @@ -36,19 +37,30 @@ jdbc_conn_mock = Mock(name="jdbc_conn") -def get_hook(hook_params=None, conn_params=None): +def get_hook( + hook_params=None, + conn_params=None, + login: str | None = "login", + password: str | None = "password", + host: str | None = "host", + schema: str | None = "schema", + port: int | None = 1234, +): hook_params = hook_params or {} conn_params = conn_params or {} connection = Connection( **{ - **dict(login="login", password="password", host="host", schema="schema", port=1234), + **dict(login=login, password=password, host=host, schema=schema, port=port), **conn_params, } ) - hook = JdbcHook(**hook_params) - hook.get_connection = Mock() - hook.get_connection.return_value = connection + class MockedJdbcHook(JdbcHook): + @classmethod + def get_connection(cls, conn_id: str) -> Connection: + return connection + + hook = MockedJdbcHook(**hook_params) return hook @@ -201,3 +213,18 @@ def test_sqlalchemy_url_with_sqlalchemy_scheme(self): hook = get_hook(conn_params=conn_params, hook_params=hook_params) assert str(hook.sqlalchemy_url) == "mssql://login:password@host:1234/schema" + + def test_get_sqlalchemy_engine_verify_creator_is_being_used(self): + jdbc_hook = get_hook( + conn_params=dict(extra={"sqlalchemy_scheme": "sqlite"}), + login=None, + password=None, + host=None, + schema=":memory:", + port=None, + ) + + with sqlite3.connect(":memory:") as connection: + jdbc_hook.get_conn = lambda: connection + engine = jdbc_hook.get_sqlalchemy_engine() + assert engine.connect().connection.connection == connection From 49e7926f72d41438086a018f6c02eb8381ceec76 Mon Sep 17 00:00:00 2001 From: Gopal Dirisala <39794726+dirrao@users.noreply.github.com> Date: Mon, 7 Oct 2024 22:12:36 +0530 Subject: [PATCH 007/125] The function resolve_kerberos_principal updated when airflow version 2.8 and above (#42777) --- .../apache/spark/hooks/spark_submit.py | 21 +++---------------- 1 file changed, 3 insertions(+), 18 deletions(-) diff --git a/airflow/providers/apache/spark/hooks/spark_submit.py b/airflow/providers/apache/spark/hooks/spark_submit.py index 37f624e310873..825239f7f2237 100644 --- a/airflow/providers/apache/spark/hooks/spark_submit.py +++ b/airflow/providers/apache/spark/hooks/spark_submit.py @@ -517,25 +517,10 @@ def _build_track_driver_status_command(self) -> list[str]: return connection_cmd def _resolve_kerberos_principal(self, principal: str | None) -> str: - """ - Resolve kerberos principal if airflow > 2.8. 
- - TODO: delete when min airflow version >= 2.8 and import directly from airflow.security.kerberos - """ - from packaging.version import Version - - from airflow.version import version - - if Version(version) < Version("2.8"): - from airflow.utils.net import get_hostname - - return principal or airflow_conf.get_mandatory_value("kerberos", "principal").replace( - "_HOST", get_hostname() - ) - else: - from airflow.security.kerberos import get_kerberos_principle + """Resolve kerberos principal.""" + from airflow.security.kerberos import get_kerberos_principle - return get_kerberos_principle(principal) + return get_kerberos_principle(principal) def submit(self, application: str = "", **kwargs: Any) -> None: """ From fd89b31ebc79252fdeb76b1468ea77432c9ad980 Mon Sep 17 00:00:00 2001 From: Danny Liu Date: Mon, 7 Oct 2024 10:31:08 -0700 Subject: [PATCH 008/125] remove docstring D214 and D215 from ignore list (#42802) Co-authored-by: D. Ferruzzi --- pyproject.toml | 2 -- 1 file changed, 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 1a7560911cdbc..99fa19fc2d58d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -306,8 +306,6 @@ ignore = [ "D107", # Unwanted; Docstring in every constructor is unnecessary if the class has a docstring. "D203", "D212", # Conflicts with D213. Both can not be enabled. - "D214", - "D215", "E731", # Do not assign a lambda expression, use a def "TCH003", # Do not move imports from stdlib to TYPE_CHECKING block "PT004", # Fixture does not return anything, add leading underscore From c152cc187c3606b331cba8d818734aa6bc9e24ff Mon Sep 17 00:00:00 2001 From: Pierre Jeambrun Date: Tue, 8 Oct 2024 01:33:19 +0800 Subject: [PATCH 009/125] AIP-84 Simplify route definition (#42799) --- airflow/api_fastapi/openapi/v1-generated.yaml | 2 +- airflow/api_fastapi/views/public/connections.py | 6 +++--- airflow/api_fastapi/views/public/dags.py | 12 +++++------- airflow/ui/openapi-gen/requests/services.gen.ts | 4 ++-- airflow/ui/openapi-gen/requests/types.gen.ts | 2 +- 5 files changed, 12 insertions(+), 14 deletions(-) diff --git a/airflow/api_fastapi/openapi/v1-generated.yaml b/airflow/api_fastapi/openapi/v1-generated.yaml index d272dd03d9302..272d0b6703b81 100644 --- a/airflow/api_fastapi/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/openapi/v1-generated.yaml @@ -34,7 +34,7 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /public/dags: + /public/dags/: get: tags: - DAG diff --git a/airflow/api_fastapi/views/public/connections.py b/airflow/api_fastapi/views/public/connections.py index 850a017988162..94e9b614e9c02 100644 --- a/airflow/api_fastapi/views/public/connections.py +++ b/airflow/api_fastapi/views/public/connections.py @@ -27,11 +27,11 @@ from airflow.api_fastapi.views.router import AirflowRouter from airflow.models import Connection -connections_router = AirflowRouter(tags=["Connection"]) +connections_router = AirflowRouter(tags=["Connection"], prefix="/connections") @connections_router.delete( - "/connections/{connection_id}", + "/{connection_id}", status_code=204, responses=create_openapi_http_exception_doc([401, 403, 404]), ) @@ -49,7 +49,7 @@ async def delete_connection( @connections_router.get( - "/connections/{connection_id}", + "/{connection_id}", responses=create_openapi_http_exception_doc([401, 403, 404]), ) async def get_connection( diff --git a/airflow/api_fastapi/views/public/dags.py b/airflow/api_fastapi/views/public/dags.py index ef76e184505c6..f0df86b787a20 100644 --- 
a/airflow/api_fastapi/views/public/dags.py +++ b/airflow/api_fastapi/views/public/dags.py @@ -50,10 +50,10 @@ from airflow.api_fastapi.views.router import AirflowRouter from airflow.models import DAG, DagModel -dags_router = AirflowRouter(tags=["DAG"]) +dags_router = AirflowRouter(tags=["DAG"], prefix="/dags") -@dags_router.get("/dags") +@dags_router.get("/") async def get_dags( limit: QueryLimit, offset: QueryOffset, @@ -92,9 +92,7 @@ async def get_dags( ) -@dags_router.get( - "/dags/{dag_id}/details", responses=create_openapi_http_exception_doc([400, 401, 403, 404, 422]) -) +@dags_router.get("/{dag_id}/details", responses=create_openapi_http_exception_doc([400, 401, 403, 404, 422])) async def get_dag_details( dag_id: str, session: Annotated[Session, Depends(get_session)], request: Request ) -> DAGDetailsResponse: @@ -114,7 +112,7 @@ async def get_dag_details( return DAGDetailsResponse.model_validate(dag_model, from_attributes=True) -@dags_router.patch("/dags/{dag_id}", responses=create_openapi_http_exception_doc([400, 401, 403, 404])) +@dags_router.patch("/{dag_id}", responses=create_openapi_http_exception_doc([400, 401, 403, 404])) async def patch_dag( dag_id: str, patch_body: DAGPatchBody, @@ -141,7 +139,7 @@ async def patch_dag( return DAGResponse.model_validate(dag, from_attributes=True) -@dags_router.patch("/dags", responses=create_openapi_http_exception_doc([400, 401, 403, 404])) +@dags_router.patch("/", responses=create_openapi_http_exception_doc([400, 401, 403, 404])) async def patch_dags( patch_body: DAGPatchBody, limit: QueryLimit, diff --git a/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow/ui/openapi-gen/requests/services.gen.ts index 023a2a458dd7b..9921aebb79c57 100644 --- a/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow/ui/openapi-gen/requests/services.gen.ts @@ -66,7 +66,7 @@ export class DagService { ): CancelablePromise { return __request(OpenAPI, { method: "GET", - url: "/public/dags", + url: "/public/dags/", query: { limit: data.limit, offset: data.offset, @@ -107,7 +107,7 @@ export class DagService { ): CancelablePromise { return __request(OpenAPI, { method: "PATCH", - url: "/public/dags", + url: "/public/dags/", query: { update_mask: data.updateMask, limit: data.limit, diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index 65a2db8926510..cdb2379d01c4d 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -240,7 +240,7 @@ export type $OpenApiTs = { }; }; }; - "/public/dags": { + "/public/dags/": { get: { req: GetDagsData; res: { From 59c0c9d7831cb1edff4f3eeb34ecbdf4a1d218aa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 21:09:24 +0200 Subject: [PATCH 010/125] Bump micromatch from 4.0.7 to 4.0.8 in /airflow/ui (#42805) Bumps [micromatch](https://github.com/micromatch/micromatch) from 4.0.7 to 4.0.8. - [Release notes](https://github.com/micromatch/micromatch/releases) - [Changelog](https://github.com/micromatch/micromatch/blob/master/CHANGELOG.md) - [Commits](https://github.com/micromatch/micromatch/compare/4.0.7...4.0.8) --- updated-dependencies: - dependency-name: micromatch dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- airflow/ui/pnpm-lock.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/airflow/ui/pnpm-lock.yaml b/airflow/ui/pnpm-lock.yaml index 515e7fea5279d..01b034085cf97 100644 --- a/airflow/ui/pnpm-lock.yaml +++ b/airflow/ui/pnpm-lock.yaml @@ -2505,8 +2505,8 @@ packages: resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} - micromatch@4.0.7: - resolution: {integrity: sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==} + micromatch@4.0.8: + resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} engines: {node: '>=8.6'} mime-db@1.52.0: @@ -5653,7 +5653,7 @@ snapshots: '@nodelib/fs.walk': 1.2.8 glob-parent: 5.1.2 merge2: 1.4.1 - micromatch: 4.0.7 + micromatch: 4.0.8 fast-json-stable-stringify@2.1.0: {} @@ -6144,7 +6144,7 @@ snapshots: merge2@1.4.1: {} - micromatch@4.0.7: + micromatch@4.0.8: dependencies: braces: 3.0.3 picomatch: 2.3.1 From 781bc448073b5938e3469bc4eabd25cd43ca795d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 16:21:59 -0400 Subject: [PATCH 011/125] Bump rollup from 4.21.0 to 4.24.0 in /airflow/ui (#42804) Bumps [rollup](https://github.com/rollup/rollup) from 4.21.0 to 4.24.0. - [Release notes](https://github.com/rollup/rollup/releases) - [Changelog](https://github.com/rollup/rollup/blob/master/CHANGELOG.md) - [Commits](https://github.com/rollup/rollup/compare/v4.21.0...v4.24.0) --- updated-dependencies: - dependency-name: rollup dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- airflow/ui/pnpm-lock.yaml | 143 ++++++++++++++++++-------------------- 1 file changed, 69 insertions(+), 74 deletions(-) diff --git a/airflow/ui/pnpm-lock.yaml b/airflow/ui/pnpm-lock.yaml index 01b034085cf97..a2bbbbf127171 100644 --- a/airflow/ui/pnpm-lock.yaml +++ b/airflow/ui/pnpm-lock.yaml @@ -1027,83 +1027,83 @@ packages: resolution: {integrity: sha512-baiMx18+IMuD1yyvOGaHM9QrVUPGGG0jC+z+IPHnRJWUAUvaKuWKyE8gjDj2rzv3sz9zOGoRSPgeBVHRhZnBlA==} engines: {node: '>=14.0.0'} - '@rollup/rollup-android-arm-eabi@4.21.0': - resolution: {integrity: sha512-WTWD8PfoSAJ+qL87lE7votj3syLavxunWhzCnx3XFxFiI/BA/r3X7MUM8dVrH8rb2r4AiO8jJsr3ZjdaftmnfA==} + '@rollup/rollup-android-arm-eabi@4.24.0': + resolution: {integrity: sha512-Q6HJd7Y6xdB48x8ZNVDOqsbh2uByBhgK8PiQgPhwkIw/HC/YX5Ghq2mQY5sRMZWHb3VsFkWooUVOZHKr7DmDIA==} cpu: [arm] os: [android] - '@rollup/rollup-android-arm64@4.21.0': - resolution: {integrity: sha512-a1sR2zSK1B4eYkiZu17ZUZhmUQcKjk2/j9Me2IDjk1GHW7LB5Z35LEzj9iJch6gtUfsnvZs1ZNyDW2oZSThrkA==} + '@rollup/rollup-android-arm64@4.24.0': + resolution: {integrity: sha512-ijLnS1qFId8xhKjT81uBHuuJp2lU4x2yxa4ctFPtG+MqEE6+C5f/+X/bStmxapgmwLwiL3ih122xv8kVARNAZA==} cpu: [arm64] os: [android] - '@rollup/rollup-darwin-arm64@4.21.0': - resolution: {integrity: sha512-zOnKWLgDld/svhKO5PD9ozmL6roy5OQ5T4ThvdYZLpiOhEGY+dp2NwUmxK0Ld91LrbjrvtNAE0ERBwjqhZTRAA==} + '@rollup/rollup-darwin-arm64@4.24.0': + resolution: {integrity: sha512-bIv+X9xeSs1XCk6DVvkO+S/z8/2AMt/2lMqdQbMrmVpgFvXlmde9mLcbQpztXm1tajC3raFDqegsH18HQPMYtA==} cpu: [arm64] os: [darwin] - '@rollup/rollup-darwin-x64@4.21.0': - resolution: {integrity: sha512-7doS8br0xAkg48SKE2QNtMSFPFUlRdw9+votl27MvT46vo44ATBmdZdGysOevNELmZlfd+NEa0UYOA8f01WSrg==} + '@rollup/rollup-darwin-x64@4.24.0': + resolution: {integrity: sha512-X6/nOwoFN7RT2svEQWUsW/5C/fYMBe4fnLK9DQk4SX4mgVBiTA9h64kjUYPvGQ0F/9xwJ5U5UfTbl6BEjaQdBQ==} cpu: [x64] os: [darwin] - '@rollup/rollup-linux-arm-gnueabihf@4.21.0': - resolution: {integrity: sha512-pWJsfQjNWNGsoCq53KjMtwdJDmh/6NubwQcz52aEwLEuvx08bzcy6tOUuawAOncPnxz/3siRtd8hiQ32G1y8VA==} + '@rollup/rollup-linux-arm-gnueabihf@4.24.0': + resolution: {integrity: sha512-0KXvIJQMOImLCVCz9uvvdPgfyWo93aHHp8ui3FrtOP57svqrF/roSSR5pjqL2hcMp0ljeGlU4q9o/rQaAQ3AYA==} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm-musleabihf@4.21.0': - resolution: {integrity: sha512-efRIANsz3UHZrnZXuEvxS9LoCOWMGD1rweciD6uJQIx2myN3a8Im1FafZBzh7zk1RJ6oKcR16dU3UPldaKd83w==} + '@rollup/rollup-linux-arm-musleabihf@4.24.0': + resolution: {integrity: sha512-it2BW6kKFVh8xk/BnHfakEeoLPv8STIISekpoF+nBgWM4d55CZKc7T4Dx1pEbTnYm/xEKMgy1MNtYuoA8RFIWw==} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm64-gnu@4.21.0': - resolution: {integrity: sha512-ZrPhydkTVhyeGTW94WJ8pnl1uroqVHM3j3hjdquwAcWnmivjAwOYjTEAuEDeJvGX7xv3Z9GAvrBkEzCgHq9U1w==} + '@rollup/rollup-linux-arm64-gnu@4.24.0': + resolution: {integrity: sha512-i0xTLXjqap2eRfulFVlSnM5dEbTVque/3Pi4g2y7cxrs7+a9De42z4XxKLYJ7+OhE3IgxvfQM7vQc43bwTgPwA==} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-arm64-musl@4.21.0': - resolution: {integrity: sha512-cfaupqd+UEFeURmqNP2eEvXqgbSox/LHOyN9/d2pSdV8xTrjdg3NgOFJCtc1vQ/jEke1qD0IejbBfxleBPHnPw==} + '@rollup/rollup-linux-arm64-musl@4.24.0': + resolution: {integrity: sha512-9E6MKUJhDuDh604Qco5yP/3qn3y7SLXYuiC0Rpr89aMScS2UAmK1wHP2b7KAa1nSjWJc/f/Lc0Wl1L47qjiyQw==} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-powerpc64le-gnu@4.21.0': - resolution: {integrity: 
sha512-ZKPan1/RvAhrUylwBXC9t7B2hXdpb/ufeu22pG2psV7RN8roOfGurEghw1ySmX/CmDDHNTDDjY3lo9hRlgtaHg==} + '@rollup/rollup-linux-powerpc64le-gnu@4.24.0': + resolution: {integrity: sha512-2XFFPJ2XMEiF5Zi2EBf4h73oR1V/lycirxZxHZNc93SqDN/IWhYYSYj8I9381ikUFXZrz2v7r2tOVk2NBwxrWw==} cpu: [ppc64] os: [linux] - '@rollup/rollup-linux-riscv64-gnu@4.21.0': - resolution: {integrity: sha512-H1eRaCwd5E8eS8leiS+o/NqMdljkcb1d6r2h4fKSsCXQilLKArq6WS7XBLDu80Yz+nMqHVFDquwcVrQmGr28rg==} + '@rollup/rollup-linux-riscv64-gnu@4.24.0': + resolution: {integrity: sha512-M3Dg4hlwuntUCdzU7KjYqbbd+BLq3JMAOhCKdBE3TcMGMZbKkDdJ5ivNdehOssMCIokNHFOsv7DO4rlEOfyKpg==} cpu: [riscv64] os: [linux] - '@rollup/rollup-linux-s390x-gnu@4.21.0': - resolution: {integrity: sha512-zJ4hA+3b5tu8u7L58CCSI0A9N1vkfwPhWd/puGXwtZlsB5bTkwDNW/+JCU84+3QYmKpLi+XvHdmrlwUwDA6kqw==} + '@rollup/rollup-linux-s390x-gnu@4.24.0': + resolution: {integrity: sha512-mjBaoo4ocxJppTorZVKWFpy1bfFj9FeCMJqzlMQGjpNPY9JwQi7OuS1axzNIk0nMX6jSgy6ZURDZ2w0QW6D56g==} cpu: [s390x] os: [linux] - '@rollup/rollup-linux-x64-gnu@4.21.0': - resolution: {integrity: sha512-e2hrvElFIh6kW/UNBQK/kzqMNY5mO+67YtEh9OA65RM5IJXYTWiXjX6fjIiPaqOkBthYF1EqgiZ6OXKcQsM0hg==} + '@rollup/rollup-linux-x64-gnu@4.24.0': + resolution: {integrity: sha512-ZXFk7M72R0YYFN5q13niV0B7G8/5dcQ9JDp8keJSfr3GoZeXEoMHP/HlvqROA3OMbMdfr19IjCeNAnPUG93b6A==} cpu: [x64] os: [linux] - '@rollup/rollup-linux-x64-musl@4.21.0': - resolution: {integrity: sha512-1vvmgDdUSebVGXWX2lIcgRebqfQSff0hMEkLJyakQ9JQUbLDkEaMsPTLOmyccyC6IJ/l3FZuJbmrBw/u0A0uCQ==} + '@rollup/rollup-linux-x64-musl@4.24.0': + resolution: {integrity: sha512-w1i+L7kAXZNdYl+vFvzSZy8Y1arS7vMgIy8wusXJzRrPyof5LAb02KGr1PD2EkRcl73kHulIID0M501lN+vobQ==} cpu: [x64] os: [linux] - '@rollup/rollup-win32-arm64-msvc@4.21.0': - resolution: {integrity: sha512-s5oFkZ/hFcrlAyBTONFY1TWndfyre1wOMwU+6KCpm/iatybvrRgmZVM+vCFwxmC5ZhdlgfE0N4XorsDpi7/4XQ==} + '@rollup/rollup-win32-arm64-msvc@4.24.0': + resolution: {integrity: sha512-VXBrnPWgBpVDCVY6XF3LEW0pOU51KbaHhccHw6AS6vBWIC60eqsH19DAeeObl+g8nKAz04QFdl/Cefta0xQtUQ==} cpu: [arm64] os: [win32] - '@rollup/rollup-win32-ia32-msvc@4.21.0': - resolution: {integrity: sha512-G9+TEqRnAA6nbpqyUqgTiopmnfgnMkR3kMukFBDsiyy23LZvUCpiUwjTRx6ezYCjJODXrh52rBR9oXvm+Fp5wg==} + '@rollup/rollup-win32-ia32-msvc@4.24.0': + resolution: {integrity: sha512-xrNcGDU0OxVcPTH/8n/ShH4UevZxKIO6HJFK0e15XItZP2UcaiLFd5kiX7hJnqCbSztUF8Qot+JWBC/QXRPYWQ==} cpu: [ia32] os: [win32] - '@rollup/rollup-win32-x64-msvc@4.21.0': - resolution: {integrity: sha512-2jsCDZwtQvRhejHLfZ1JY6w6kEuEtfF9nzYsZxzSlNVKDX+DpsDJ+Rbjkm74nvg2rdx0gwBS+IMdvwJuq3S9pQ==} + '@rollup/rollup-win32-x64-msvc@4.24.0': + resolution: {integrity: sha512-fbMkAF7fufku0N2dE5TBXcNlg0pt0cJue4xBRE2Qc5Vqikxr4VCgKj/ht6SMdFcOacVA9rqF70APJ8RN/4vMJw==} cpu: [x64] os: [win32] @@ -1250,9 +1250,6 @@ packages: '@types/aria-query@5.0.4': resolution: {integrity: sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==} - '@types/estree@1.0.5': - resolution: {integrity: sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==} - '@types/estree@1.0.6': resolution: {integrity: sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==} @@ -2929,8 +2926,8 @@ packages: resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} - rollup@4.21.0: - resolution: {integrity: 
sha512-vo+S/lfA2lMS7rZ2Qoubi6I5hwZwzXeUIctILZLbHI+laNtvhhOIon2S1JksA5UEDQ7l3vberd0fxK44lTYjbQ==} + rollup@4.24.0: + resolution: {integrity: sha512-DOmrlGSXNk1DM0ljiQA+i+o0rSLhtii1je5wgk60j49d1jHT5YYttBv1iWOnYSTG+fZZESUOSNiAl89SIet+Cg==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true @@ -4484,52 +4481,52 @@ snapshots: '@remix-run/router@1.19.2': {} - '@rollup/rollup-android-arm-eabi@4.21.0': + '@rollup/rollup-android-arm-eabi@4.24.0': optional: true - '@rollup/rollup-android-arm64@4.21.0': + '@rollup/rollup-android-arm64@4.24.0': optional: true - '@rollup/rollup-darwin-arm64@4.21.0': + '@rollup/rollup-darwin-arm64@4.24.0': optional: true - '@rollup/rollup-darwin-x64@4.21.0': + '@rollup/rollup-darwin-x64@4.24.0': optional: true - '@rollup/rollup-linux-arm-gnueabihf@4.21.0': + '@rollup/rollup-linux-arm-gnueabihf@4.24.0': optional: true - '@rollup/rollup-linux-arm-musleabihf@4.21.0': + '@rollup/rollup-linux-arm-musleabihf@4.24.0': optional: true - '@rollup/rollup-linux-arm64-gnu@4.21.0': + '@rollup/rollup-linux-arm64-gnu@4.24.0': optional: true - '@rollup/rollup-linux-arm64-musl@4.21.0': + '@rollup/rollup-linux-arm64-musl@4.24.0': optional: true - '@rollup/rollup-linux-powerpc64le-gnu@4.21.0': + '@rollup/rollup-linux-powerpc64le-gnu@4.24.0': optional: true - '@rollup/rollup-linux-riscv64-gnu@4.21.0': + '@rollup/rollup-linux-riscv64-gnu@4.24.0': optional: true - '@rollup/rollup-linux-s390x-gnu@4.21.0': + '@rollup/rollup-linux-s390x-gnu@4.24.0': optional: true - '@rollup/rollup-linux-x64-gnu@4.21.0': + '@rollup/rollup-linux-x64-gnu@4.24.0': optional: true - '@rollup/rollup-linux-x64-musl@4.21.0': + '@rollup/rollup-linux-x64-musl@4.24.0': optional: true - '@rollup/rollup-win32-arm64-msvc@4.21.0': + '@rollup/rollup-win32-arm64-msvc@4.24.0': optional: true - '@rollup/rollup-win32-ia32-msvc@4.21.0': + '@rollup/rollup-win32-ia32-msvc@4.24.0': optional: true - '@rollup/rollup-win32-x64-msvc@4.21.0': + '@rollup/rollup-win32-x64-msvc@4.24.0': optional: true '@stylistic/eslint-plugin@2.8.0(eslint@9.10.0(jiti@1.21.6))(typescript@5.5.4)': @@ -4672,8 +4669,6 @@ snapshots: '@types/aria-query@5.0.4': {} - '@types/estree@1.0.5': {} - '@types/estree@1.0.6': {} '@types/json-schema@7.0.15': {} @@ -6567,26 +6562,26 @@ snapshots: reusify@1.0.4: {} - rollup@4.21.0: + rollup@4.24.0: dependencies: - '@types/estree': 1.0.5 + '@types/estree': 1.0.6 optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.21.0 - '@rollup/rollup-android-arm64': 4.21.0 - '@rollup/rollup-darwin-arm64': 4.21.0 - '@rollup/rollup-darwin-x64': 4.21.0 - '@rollup/rollup-linux-arm-gnueabihf': 4.21.0 - '@rollup/rollup-linux-arm-musleabihf': 4.21.0 - '@rollup/rollup-linux-arm64-gnu': 4.21.0 - '@rollup/rollup-linux-arm64-musl': 4.21.0 - '@rollup/rollup-linux-powerpc64le-gnu': 4.21.0 - '@rollup/rollup-linux-riscv64-gnu': 4.21.0 - '@rollup/rollup-linux-s390x-gnu': 4.21.0 - '@rollup/rollup-linux-x64-gnu': 4.21.0 - '@rollup/rollup-linux-x64-musl': 4.21.0 - '@rollup/rollup-win32-arm64-msvc': 4.21.0 - '@rollup/rollup-win32-ia32-msvc': 4.21.0 - '@rollup/rollup-win32-x64-msvc': 4.21.0 + '@rollup/rollup-android-arm-eabi': 4.24.0 + '@rollup/rollup-android-arm64': 4.24.0 + '@rollup/rollup-darwin-arm64': 4.24.0 + '@rollup/rollup-darwin-x64': 4.24.0 + '@rollup/rollup-linux-arm-gnueabihf': 4.24.0 + '@rollup/rollup-linux-arm-musleabihf': 4.24.0 + '@rollup/rollup-linux-arm64-gnu': 4.24.0 + '@rollup/rollup-linux-arm64-musl': 4.24.0 + '@rollup/rollup-linux-powerpc64le-gnu': 4.24.0 + '@rollup/rollup-linux-riscv64-gnu': 4.24.0 + 
'@rollup/rollup-linux-s390x-gnu': 4.24.0 + '@rollup/rollup-linux-x64-gnu': 4.24.0 + '@rollup/rollup-linux-x64-musl': 4.24.0 + '@rollup/rollup-win32-arm64-msvc': 4.24.0 + '@rollup/rollup-win32-ia32-msvc': 4.24.0 + '@rollup/rollup-win32-x64-msvc': 4.24.0 fsevents: 2.3.3 run-parallel@1.2.0: @@ -6942,7 +6937,7 @@ snapshots: dependencies: esbuild: 0.21.5 postcss: 8.4.45 - rollup: 4.21.0 + rollup: 4.24.0 optionalDependencies: '@types/node': 22.5.4 fsevents: 2.3.3 From 1ce3d9031311e74920591ab3303703232c71290f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 16:22:15 -0400 Subject: [PATCH 012/125] Bump vite from 5.4.4 to 5.4.6 in /airflow/ui (#42803) Bumps [vite](https://github.com/vitejs/vite/tree/HEAD/packages/vite) from 5.4.4 to 5.4.6. - [Release notes](https://github.com/vitejs/vite/releases) - [Changelog](https://github.com/vitejs/vite/blob/v5.4.6/packages/vite/CHANGELOG.md) - [Commits](https://github.com/vitejs/vite/commits/v5.4.6/packages/vite) --- updated-dependencies: - dependency-name: vite dependency-type: direct:development ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- airflow/ui/package.json | 2 +- airflow/ui/pnpm-lock.yaml | 35 +++++++++++++++-------------------- 2 files changed, 16 insertions(+), 21 deletions(-) diff --git a/airflow/ui/package.json b/airflow/ui/package.json index 82c6370f9dcba..014564c1604bc 100644 --- a/airflow/ui/package.json +++ b/airflow/ui/package.json @@ -58,7 +58,7 @@ "prettier": "^3.3.3", "typescript": "~5.5.4", "typescript-eslint": "^8.5.0", - "vite": "^5.4.4", + "vite": "^5.4.6", "vitest": "^2.1.1" } } diff --git a/airflow/ui/pnpm-lock.yaml b/airflow/ui/pnpm-lock.yaml index a2bbbbf127171..6298d2ba3256f 100644 --- a/airflow/ui/pnpm-lock.yaml +++ b/airflow/ui/pnpm-lock.yaml @@ -83,7 +83,7 @@ importers: version: 18.3.0 '@vitejs/plugin-react-swc': specifier: ^3.7.0 - version: 3.7.0(vite@5.4.4(@types/node@22.5.4)) + version: 3.7.0(vite@5.4.6(@types/node@22.5.4)) '@vitest/coverage-v8': specifier: ^2.1.1 version: 2.1.1(vitest@2.1.1(@types/node@22.5.4)(happy-dom@15.0.0)) @@ -130,8 +130,8 @@ importers: specifier: ^8.5.0 version: 8.5.0(eslint@9.10.0(jiti@1.21.6))(typescript@5.5.4) vite: - specifier: ^5.4.4 - version: 5.4.4(@types/node@22.5.4) + specifier: ^5.4.6 + version: 5.4.6(@types/node@22.5.4) vitest: specifier: ^2.1.1 version: 2.1.1(@types/node@22.5.4)(happy-dom@15.0.0) @@ -2721,9 +2721,6 @@ packages: perfect-debounce@1.0.0: resolution: {integrity: sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA==} - picocolors@1.0.1: - resolution: {integrity: sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==} - picocolors@1.1.0: resolution: {integrity: sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==} @@ -3251,8 +3248,8 @@ packages: engines: {node: ^18.0.0 || >=20.0.0} hasBin: true - vite@5.4.4: - resolution: {integrity: sha512-RHFCkULitycHVTtelJ6jQLd+KSAAzOgEYorV32R2q++M6COBjKJR6BxqClwp5sf0XaBDjVMuJ9wnNfyAJwjMkA==} + vite@5.4.6: + resolution: {integrity: sha512-IeL5f8OO5nylsgzd9tq4qD2QqI0k2CQLGrWD0rCN0EQJZpBK5vJAx0I+GDkMOXxQX/OfFHMuLIx6ddAxGX/k+Q==} engines: {node: ^18.0.0 || >=20.0.0} hasBin: true peerDependencies: @@ -3394,7 +3391,7 @@ snapshots: '@babel/code-frame@7.24.7': dependencies: '@babel/highlight': 7.24.7 - picocolors: 1.0.1 + picocolors: 1.1.0 '@babel/generator@7.17.7': 
dependencies: @@ -3442,7 +3439,7 @@ snapshots: '@babel/helper-validator-identifier': 7.24.7 chalk: 2.4.2 js-tokens: 4.0.0 - picocolors: 1.0.1 + picocolors: 1.1.0 '@babel/parser@7.25.4': dependencies: @@ -4821,10 +4818,10 @@ snapshots: '@typescript-eslint/types': 8.5.0 eslint-visitor-keys: 3.4.3 - '@vitejs/plugin-react-swc@3.7.0(vite@5.4.4(@types/node@22.5.4))': + '@vitejs/plugin-react-swc@3.7.0(vite@5.4.6(@types/node@22.5.4))': dependencies: '@swc/core': 1.7.14 - vite: 5.4.4(@types/node@22.5.4) + vite: 5.4.6(@types/node@22.5.4) transitivePeerDependencies: - '@swc/helpers' @@ -4853,13 +4850,13 @@ snapshots: chai: 5.1.1 tinyrainbow: 1.2.0 - '@vitest/mocker@2.1.1(@vitest/spy@2.1.1)(vite@5.4.4(@types/node@22.5.4))': + '@vitest/mocker@2.1.1(@vitest/spy@2.1.1)(vite@5.4.6(@types/node@22.5.4))': dependencies: '@vitest/spy': 2.1.1 estree-walker: 3.0.3 magic-string: 0.30.11 optionalDependencies: - vite: 5.4.4(@types/node@22.5.4) + vite: 5.4.6(@types/node@22.5.4) '@vitest/pretty-format@2.1.1': dependencies: @@ -6341,8 +6338,6 @@ snapshots: perfect-debounce@1.0.0: {} - picocolors@1.0.1: {} - picocolors@1.1.0: {} picomatch@2.3.1: {} @@ -6921,7 +6916,7 @@ snapshots: cac: 6.7.14 debug: 4.3.7 pathe: 1.1.2 - vite: 5.4.4(@types/node@22.5.4) + vite: 5.4.6(@types/node@22.5.4) transitivePeerDependencies: - '@types/node' - less @@ -6933,7 +6928,7 @@ snapshots: - supports-color - terser - vite@5.4.4(@types/node@22.5.4): + vite@5.4.6(@types/node@22.5.4): dependencies: esbuild: 0.21.5 postcss: 8.4.45 @@ -6945,7 +6940,7 @@ snapshots: vitest@2.1.1(@types/node@22.5.4)(happy-dom@15.0.0): dependencies: '@vitest/expect': 2.1.1 - '@vitest/mocker': 2.1.1(@vitest/spy@2.1.1)(vite@5.4.4(@types/node@22.5.4)) + '@vitest/mocker': 2.1.1(@vitest/spy@2.1.1)(vite@5.4.6(@types/node@22.5.4)) '@vitest/pretty-format': 2.1.1 '@vitest/runner': 2.1.1 '@vitest/snapshot': 2.1.1 @@ -6960,7 +6955,7 @@ snapshots: tinyexec: 0.3.0 tinypool: 1.0.1 tinyrainbow: 1.2.0 - vite: 5.4.4(@types/node@22.5.4) + vite: 5.4.6(@types/node@22.5.4) vite-node: 2.1.1(@types/node@22.5.4) why-is-node-running: 2.3.0 optionalDependencies: From 9ded623e50ff9b4e5ae4fdb0217321beaffbfaa9 Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Tue, 8 Oct 2024 12:16:27 +0800 Subject: [PATCH 013/125] fix(assets/managers): fix error handling file loc when asset alias resolved into new assets (#42735) * fix(assets/managers): fix error handling file loc when asset alias resolved into new assets * test(asset/manager): add test case test_register_asset_change_with_alias * refactor(assets/manager): simplify for loop --- airflow/assets/manager.py | 5 ++- tests/assets/test_manager.py | 83 +++++++++++++++++++++++++++++------- 2 files changed, 71 insertions(+), 17 deletions(-) diff --git a/airflow/assets/manager.py b/airflow/assets/manager.py index d68a0efc87d12..cd4d72e633a8e 100644 --- a/airflow/assets/manager.py +++ b/airflow/assets/manager.py @@ -266,14 +266,15 @@ def _send_dag_priority_parsing_request_if_needed(fileloc: str) -> str | None: return None return req.fileloc - (_send_dag_priority_parsing_request_if_needed(fileloc) for fileloc in file_locs) + for fileloc in file_locs: + _send_dag_priority_parsing_request_if_needed(fileloc) @classmethod def _postgres_send_dag_priority_parsing_request(cls, file_locs: Iterable[str], session: Session) -> None: from sqlalchemy.dialects.postgresql import insert stmt = insert(DagPriorityParsingRequest).on_conflict_do_nothing() - session.execute(stmt, {"fileloc": fileloc for fileloc in file_locs}) + session.execute(stmt, [{"fileloc": fileloc} for fileloc 
in file_locs])
 
 
 def resolve_asset_manager() -> AssetManager:
diff --git a/tests/assets/test_manager.py b/tests/assets/test_manager.py
index 0539fdace52ba..950949fe474e3 100644
--- a/tests/assets/test_manager.py
+++ b/tests/assets/test_manager.py
@@ -24,11 +24,19 @@
 import pytest
 from sqlalchemy import delete
 
-from airflow.assets import Asset
+from airflow.assets import Asset, AssetAlias
 from airflow.assets.manager import AssetManager
 from airflow.listeners.listener import get_listener_manager
-from airflow.models.asset import AssetDagRunQueue, AssetEvent, AssetModel, DagScheduleAssetReference
+from airflow.models.asset import (
+    AssetAliasModel,
+    AssetDagRunQueue,
+    AssetEvent,
+    AssetModel,
+    DagScheduleAssetAliasReference,
+    DagScheduleAssetReference,
+)
 from airflow.models.dag import DagModel
+from airflow.models.dagbag import DagPriorityParsingRequest
 from airflow.serialization.pydantic.taskinstance import TaskInstancePydantic
 from tests.listeners import asset_listener
 
@@ -38,6 +46,15 @@
 pytest.importorskip("pydantic", minversion="2.0.0")
 
 
+@pytest.fixture
+def clear_assets():
+    from tests.test_utils.db import clear_db_assets
+
+    clear_db_assets()
+    yield
+    clear_db_assets()
+
+
 @pytest.fixture
 def mock_task_instance():
     return TaskInstancePydantic(
@@ -92,15 +109,16 @@ def create_mock_dag():
 class TestAssetManager:
     def test_register_asset_change_asset_doesnt_exist(self, mock_task_instance):
-        dsem = AssetManager()
-
         asset = Asset(uri="asset_doesnt_exist")
         mock_session = mock.Mock()
         # Gotta mock up the query results
         mock_session.scalar.return_value = None
 
-        dsem.register_asset_change(task_instance=mock_task_instance, asset=asset, session=mock_session)
+        asset_manager = AssetManager()
+        asset_manager.register_asset_change(
+            task_instance=mock_task_instance, asset=asset, session=mock_session
+        )
 
         # Ensure that we have ignored the asset and _not_ created a AssetEvent or
         # AssetDagRunQueue rows
@@ -108,9 +126,9 @@ def test_register_asset_change_asset_doesnt_exist(self, mock_task_instance):
         mock_session.merge.assert_not_called()
 
     def test_register_asset_change(self, session, dag_maker, mock_task_instance):
-        dsem = AssetManager()
+        asset_manager = AssetManager()
 
-        ds = Asset(uri="test_asset_uri")
+        asset = Asset(uri="test_asset_uri")
         dag1 = DagModel(dag_id="dag1", is_active=True)
         dag2 = DagModel(dag_id="dag2", is_active=True)
         session.add_all([dag1, dag2])
@@ -121,23 +139,58 @@ def test_register_asset_change(self, session, dag_maker, mock_task_instance):
         session.execute(delete(AssetDagRunQueue))
         session.flush()
 
-        dsem.register_asset_change(task_instance=mock_task_instance, asset=ds, session=session)
+        asset_manager.register_asset_change(task_instance=mock_task_instance, asset=asset, session=session)
+        session.flush()
+
+        # Ensure we've created an asset
+        assert session.query(AssetEvent).filter_by(dataset_id=asm.id).count() == 1
+        assert session.query(AssetDagRunQueue).count() == 2
+
+    @pytest.mark.usefixtures("clear_assets")
+    def test_register_asset_change_with_alias(self, session, dag_maker, mock_task_instance):
+        consumer_dag_1 = DagModel(dag_id="consumer_1", is_active=True, fileloc="dag1.py")
+        consumer_dag_2 = DagModel(dag_id="consumer_2", is_active=True, fileloc="dag2.py")
+        session.add_all([consumer_dag_1, consumer_dag_2])
+
+        asm = AssetModel(uri="test_asset_uri")
+        session.add(asm)
+
+        asam = AssetAliasModel(name="test_alias_name")
+        session.add(asam)
+        asam.consuming_dags = [
+            DagScheduleAssetAliasReference(alias_id=asam.id, dag_id=dag.dag_id)
+            for dag in (consumer_dag_1, 
consumer_dag_2) + ] + session.execute(delete(AssetDagRunQueue)) + session.flush() + + asset = Asset(uri="test_asset_uri") + asset_alias = AssetAlias(name="test_alias_name") + asset_manager = AssetManager() + asset_manager.register_asset_change( + task_instance=mock_task_instance, + asset=asset, + aliases=[asset_alias], + source_alias_names=["test_alias_name"], + session=session, + ) session.flush() # Ensure we've created an asset assert session.query(AssetEvent).filter_by(dataset_id=asm.id).count() == 1 assert session.query(AssetDagRunQueue).count() == 2 + assert session.query(DagPriorityParsingRequest).count() == 2 def test_register_asset_change_no_downstreams(self, session, mock_task_instance): - dsem = AssetManager() + asset_manager = AssetManager() - ds = Asset(uri="never_consumed") + asset = Asset(uri="never_consumed") asm = AssetModel(uri="never_consumed") session.add(asm) session.execute(delete(AssetDagRunQueue)) session.flush() - dsem.register_asset_change(task_instance=mock_task_instance, asset=ds, session=session) + asset_manager.register_asset_change(task_instance=mock_task_instance, asset=asset, session=session) session.flush() # Ensure we've created an asset @@ -146,11 +199,11 @@ def test_register_asset_change_no_downstreams(self, session, mock_task_instance) @pytest.mark.skip_if_database_isolation_mode def test_register_asset_change_notifies_asset_listener(self, session, mock_task_instance): - dsem = AssetManager() + asset_manager = AssetManager() asset_listener.clear() get_listener_manager().add_listener(asset_listener) - ds = Asset(uri="test_asset_uri_2") + asset = Asset(uri="test_asset_uri_2") dag1 = DagModel(dag_id="dag3") session.add(dag1) @@ -159,12 +212,12 @@ def test_register_asset_change_notifies_asset_listener(self, session, mock_task_ asm.consuming_dags = [DagScheduleAssetReference(dag_id=dag1.dag_id)] session.flush() - dsem.register_asset_change(task_instance=mock_task_instance, asset=ds, session=session) + asset_manager.register_asset_change(task_instance=mock_task_instance, asset=asset, session=session) session.flush() # Ensure the listener was notified assert len(asset_listener.changed) == 1 - assert asset_listener.changed[0].uri == ds.uri + assert asset_listener.changed[0].uri == asset.uri @pytest.mark.skip_if_database_isolation_mode def test_create_assets_notifies_asset_listener(self, session): From 000d2da0c86e68731f7a544f0c394c52121403f9 Mon Sep 17 00:00:00 2001 From: Pierre Jeambrun Date: Tue, 8 Oct 2024 15:16:13 +0800 Subject: [PATCH 014/125] AIP-84 Delete Variable (#42798) --- .../endpoints/variable_endpoint.py | 2 + airflow/api_fastapi/openapi/v1-generated.yaml | 41 ++++++++++++ airflow/api_fastapi/views/public/__init__.py | 2 + airflow/api_fastapi/views/public/variables.py | 42 ++++++++++++ airflow/ui/openapi-gen/queries/common.ts | 4 ++ airflow/ui/openapi-gen/queries/queries.ts | 40 +++++++++++ .../ui/openapi-gen/requests/services.gen.ts | 30 +++++++++ airflow/ui/openapi-gen/requests/types.gen.ts | 33 ++++++++++ .../views/public/test_variables.py | 66 +++++++++++++++++++ 9 files changed, 260 insertions(+) create mode 100644 airflow/api_fastapi/views/public/variables.py create mode 100644 tests/api_fastapi/views/public/test_variables.py diff --git a/airflow/api_connexion/endpoints/variable_endpoint.py b/airflow/api_connexion/endpoints/variable_endpoint.py index 1375484a422fc..9413f9158652d 100644 --- a/airflow/api_connexion/endpoints/variable_endpoint.py +++ b/airflow/api_connexion/endpoints/variable_endpoint.py @@ -31,6 +31,7 @@ from 
airflow.api_connexion.schemas.variable_schema import variable_collection_schema, variable_schema from airflow.models import Variable from airflow.security import permissions +from airflow.utils.api_migration import mark_fastapi_migration_done from airflow.utils.log.action_logger import action_event_from_permission from airflow.utils.session import NEW_SESSION, provide_session from airflow.www.decorators import action_logging @@ -43,6 +44,7 @@ RESOURCE_EVENT_PREFIX = "variable" +@mark_fastapi_migration_done @security.requires_access_variable("DELETE") @action_logging( event=action_event_from_permission( diff --git a/airflow/api_fastapi/openapi/v1-generated.yaml b/airflow/api_fastapi/openapi/v1-generated.yaml index 272d0b6703b81..f3bc8612bfa76 100644 --- a/airflow/api_fastapi/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/openapi/v1-generated.yaml @@ -455,6 +455,47 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + /public/variables/{variable_key}: + delete: + tags: + - Variable + summary: Delete Variable + description: Delete a variable entry. + operationId: delete_variable + parameters: + - name: variable_key + in: path + required: true + schema: + type: string + title: Variable Key + responses: + '204': + description: Successful Response + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' components: schemas: ConnectionResponse: diff --git a/airflow/api_fastapi/views/public/__init__.py b/airflow/api_fastapi/views/public/__init__.py index 9c0eefebb875e..4e02d9ab43bcf 100644 --- a/airflow/api_fastapi/views/public/__init__.py +++ b/airflow/api_fastapi/views/public/__init__.py @@ -19,6 +19,7 @@ from airflow.api_fastapi.views.public.connections import connections_router from airflow.api_fastapi.views.public.dags import dags_router +from airflow.api_fastapi.views.public.variables import variables_router from airflow.api_fastapi.views.router import AirflowRouter public_router = AirflowRouter(prefix="/public") @@ -26,3 +27,4 @@ public_router.include_router(dags_router) public_router.include_router(connections_router) +public_router.include_router(variables_router) diff --git a/airflow/api_fastapi/views/public/variables.py b/airflow/api_fastapi/views/public/variables.py new file mode 100644 index 0000000000000..e4edb8601fd09 --- /dev/null +++ b/airflow/api_fastapi/views/public/variables.py @@ -0,0 +1,42 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from fastapi import Depends, HTTPException
+from sqlalchemy.orm import Session
+from typing_extensions import Annotated
+
+from airflow.api_fastapi.db.common import get_session
+from airflow.api_fastapi.openapi.exceptions import create_openapi_http_exception_doc
+from airflow.api_fastapi.views.router import AirflowRouter
+from airflow.models.variable import Variable
+
+variables_router = AirflowRouter(tags=["Variable"], prefix="/variables")
+
+
+@variables_router.delete(
+    "/{variable_key}",
+    status_code=204,
+    responses=create_openapi_http_exception_doc([401, 403, 404]),
+)
+async def delete_variable(
+    variable_key: str,
+    session: Annotated[Session, Depends(get_session)],
+):
+    """Delete a variable entry."""
+    if Variable.delete(variable_key, session) == 0:
+        raise HTTPException(404, f"The Variable with key: `{variable_key}` was not found")
diff --git a/airflow/ui/openapi-gen/queries/common.ts b/airflow/ui/openapi-gen/queries/common.ts
index ff7bb3995cb49..afd7afd5bcc06 100644
--- a/airflow/ui/openapi-gen/queries/common.ts
+++ b/airflow/ui/openapi-gen/queries/common.ts
@@ -5,6 +5,7 @@ import {
   AssetService,
   ConnectionService,
   DagService,
+  VariableService,
 } from "../requests/services.gen";
 import { DagRunState } from "../requests/types.gen";

@@ -119,3 +120,6 @@ export type DagServicePatchDagMutationResult = Awaited<
 export type ConnectionServiceDeleteConnectionMutationResult = Awaited<
   ReturnType<typeof ConnectionService.deleteConnection>
 >;
+export type VariableServiceDeleteVariableMutationResult = Awaited<
+  ReturnType<typeof VariableService.deleteVariable>
+>;
diff --git a/airflow/ui/openapi-gen/queries/queries.ts b/airflow/ui/openapi-gen/queries/queries.ts
index 4aa627d74fd0c..22d58eadda0cb 100644
--- a/airflow/ui/openapi-gen/queries/queries.ts
+++ b/airflow/ui/openapi-gen/queries/queries.ts
@@ -10,6 +10,7 @@ import {
   AssetService,
   ConnectionService,
   DagService,
+  VariableService,
 } from "../requests/services.gen";
 import { DAGPatchBody, DagRunState } from "../requests/types.gen";
 import * as Common from "./common";
@@ -345,3 +346,42 @@ export const useConnectionServiceDeleteConnection = <
     }) as unknown as Promise<TData>,
     ...options,
   });
+/**
+ * Delete Variable
+ * Delete a variable entry.
+ * @param data The data for the request.
+ * @param data.variableKey
+ * @returns void Successful Response
+ * @throws ApiError
+ */
+export const useVariableServiceDeleteVariable = <
+  TData = Common.VariableServiceDeleteVariableMutationResult,
+  TError = unknown,
+  TContext = unknown,
+>(
+  options?: Omit<
+    UseMutationOptions<
+      TData,
+      TError,
+      {
+        variableKey: string;
+      },
+      TContext
+    >,
+    "mutationFn"
+  >,
+) =>
+  useMutation<
+    TData,
+    TError,
+    {
+      variableKey: string;
+    },
+    TContext
+  >({
+    mutationFn: ({ variableKey }) =>
+      VariableService.deleteVariable({
+        variableKey,
+      }) as unknown as Promise<TData>,
+    ...options,
+  });
diff --git a/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow/ui/openapi-gen/requests/services.gen.ts
index 9921aebb79c57..72fd2f68f1000 100644
--- a/airflow/ui/openapi-gen/requests/services.gen.ts
+++ b/airflow/ui/openapi-gen/requests/services.gen.ts
@@ -17,6 +17,8 @@ import type {
   DeleteConnectionResponse,
   GetConnectionData,
   GetConnectionResponse,
+  DeleteVariableData,
+  DeleteVariableResponse,
 } from "./types.gen";

 export class AssetService {
@@ -246,3 +248,31 @@ export class ConnectionService {
     });
   }
 }
+
+export class VariableService {
+  /**
+   * Delete Variable
+   * Delete a variable entry.
+   * @param data The data for the request.
+   * @param data.variableKey
+   * @returns void Successful Response
+   * @throws ApiError
+   */
+  public static deleteVariable(
+    data: DeleteVariableData,
+  ): CancelablePromise<DeleteVariableResponse> {
+    return __request(OpenAPI, {
+      method: "DELETE",
+      url: "/public/variables/{variable_key}",
+      path: {
+        variable_key: data.variableKey,
+      },
+      errors: {
+        401: "Unauthorized",
+        403: "Forbidden",
+        404: "Not Found",
+        422: "Validation Error",
+      },
+    });
+  }
+}
diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts
index cdb2379d01c4d..d07d980397c83 100644
--- a/airflow/ui/openapi-gen/requests/types.gen.ts
+++ b/airflow/ui/openapi-gen/requests/types.gen.ts
@@ -222,6 +222,12 @@ export type GetConnectionData = {

 export type GetConnectionResponse = ConnectionResponse;

+export type DeleteVariableData = {
+  variableKey: string;
+};
+
+export type DeleteVariableResponse = void;
+
 export type $OpenApiTs = {
   "/ui/next_run_assets/{dag_id}": {
     get: {
@@ -398,4 +404,31 @@ export type $OpenApiTs = {
       };
     };
   };
+  "/public/variables/{variable_key}": {
+    delete: {
+      req: DeleteVariableData;
+      res: {
+        /**
+         * Successful Response
+         */
+        204: void;
+        /**
+         * Unauthorized
+         */
+        401: HTTPExceptionResponse;
+        /**
+         * Forbidden
+         */
+        403: HTTPExceptionResponse;
+        /**
+         * Not Found
+         */
+        404: HTTPExceptionResponse;
+        /**
+         * Validation Error
+         */
+        422: HTTPValidationError;
+      };
+    };
+  };
 };
diff --git a/tests/api_fastapi/views/public/test_variables.py b/tests/api_fastapi/views/public/test_variables.py
new file mode 100644
index 0000000000000..56d65b98b6589
--- /dev/null
+++ b/tests/api_fastapi/views/public/test_variables.py
@@ -0,0 +1,66 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
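Before the tests that follow, the contract of the new route is easiest to see end to end. A minimal sketch of calling it over plain HTTP, assuming a locally running API server on port 8080 and the third-party httpx client (both are illustrative assumptions here, not part of this patch):

# Editorial sketch, not part of the patch: exercising the new endpoint directly.
# Assumes a hypothetical local deployment at http://localhost:8080 and httpx.
import httpx

BASE_URL = "http://localhost:8080"  # hypothetical local API server

resp = httpx.delete(f"{BASE_URL}/public/variables/test_variable_key")
if resp.status_code == 204:
    print("Variable deleted")  # matches status_code=204 declared on the router
elif resp.status_code == 404:
    # FastAPI renders HTTPException as {"detail": "..."}, which the tests assert on
    print(resp.json()["detail"])

A 204 with an empty body signals success, mirroring the ``status_code=204`` declared on the router above; the 404 detail string is the one asserted in the tests below.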
+from __future__ import annotations + +import pytest + +from airflow.models.variable import Variable +from airflow.utils.session import provide_session +from tests.test_utils.db import clear_db_variables + +pytestmark = pytest.mark.db_test + +TEST_VARIABLE_KEY = "test_variable_key" +TEST_VARIABLE_VAL = 3 +TEST_VARIABLE_DESCRIPTION = "Some description for the variable" +TEST_CONN_TYPE = "test_type" + + +@provide_session +def _create_variable(session) -> None: + Variable.set( + key=TEST_VARIABLE_KEY, value=TEST_VARIABLE_VAL, description=TEST_VARIABLE_DESCRIPTION, session=session + ) + + +class TestVariableEndpoint: + @pytest.fixture(autouse=True) + def setup(self) -> None: + clear_db_variables() + + def teardown_method(self) -> None: + clear_db_variables() + + def create_variable(self): + _create_variable() + + +class TestDeleteVariable(TestVariableEndpoint): + def test_delete_should_respond_204(self, test_client, session): + self.create_variable() + variables = session.query(Variable).all() + assert len(variables) == 1 + response = test_client.delete(f"/public/variables/{TEST_VARIABLE_KEY}") + assert response.status_code == 204 + variables = session.query(Variable).all() + assert len(variables) == 0 + + def test_delete_should_respond_404(self, test_client): + response = test_client.delete(f"/public/variables/{TEST_VARIABLE_KEY}") + assert response.status_code == 404 + body = response.json() + assert f"The Variable with key: `{TEST_VARIABLE_KEY}` was not found" == body["detail"] From 63ff22f4038f34354dc5807036d1bf10653c2ecd Mon Sep 17 00:00:00 2001 From: Jens Scheffler <95105677+jscheffl@users.noreply.github.com> Date: Tue, 8 Oct 2024 12:00:11 +0200 Subject: [PATCH 015/125] Drop python3.8 support core and providers (#42766) * Drop Python 3.8 support in core Add newsfragment * Drop Python 3.8 support in provider packages --- .github/actions/breeze/action.yml | 3 - .github/workflows/build-images.yml | 18 +- .github/workflows/check-providers.yml | 5 +- .pre-commit-config.yaml | 7 - .readthedocs.yml | 2 +- Dockerfile | 2 +- Dockerfile.ci | 4 +- INSTALL | 12 +- README.md | 8 +- airflow/cli/commands/connection_command.py | 2 +- airflow/compat/functools.py | 33 -- airflow/configuration.py | 4 +- airflow/io/__init__.py | 2 +- airflow/models/taskinstance.py | 2 +- airflow/operators/python.py | 2 +- .../MANAGING_PROVIDERS_LIFECYCLE.rst | 2 +- .../amazon/aws/transfers/sql_to_s3.py | 9 +- airflow/providers/amazon/aws/utils/mixins.py | 3 +- airflow/providers/cloudant/provider.yaml | 3 +- .../kubernetes/kubernetes_helper_functions.py | 2 +- airflow/providers/common/io/xcom/backend.py | 21 +- airflow/providers/openlineage/conf.py | 2 +- airflow/serialization/serialized_objects.py | 2 +- airflow/serialization/serializers/timezone.py | 16 +- airflow/utils/log/secrets_masker.py | 3 +- airflow/utils/platform.py | 3 +- airflow/www/forms.py | 2 +- airflow/www/views.py | 5 +- clients/python/README.md | 2 +- clients/python/pyproject.toml | 5 +- clients/python/test_python_client.py | 2 +- constraints/README.md | 6 +- .../03_contributors_quick_start.rst | 49 ++- contributing-docs/05_pull_requests.rst | 2 +- contributing-docs/07_local_virtualenv.rst | 14 +- contributing-docs/08_static_code_checks.rst | 4 +- .../12_airflow_dependencies_and_extras.rst | 8 +- .../testing/docker_compose_tests.rst | 8 +- contributing-docs/testing/k8s_tests.rst | 84 ++--- contributing-docs/testing/unit_tests.rst | 12 +- dev/README_RELEASE_AIRFLOW.md | 8 +- dev/README_RELEASE_PROVIDER_PACKAGES.md | 2 +- dev/breeze/README.md | 2 +- 
dev/breeze/doc/01_installation.rst | 6 +- dev/breeze/doc/03_developer_tasks.rst | 10 +- dev/breeze/doc/06_managing_docker_images.rst | 4 +- dev/breeze/doc/10_advanced_breeze_topics.rst | 6 +- ...002-implement-standalone-python-command.md | 2 +- dev/breeze/doc/ci/02_images.md | 63 ++-- dev/breeze/doc/ci/04_selective_checks.md | 12 +- dev/breeze/doc/ci/08_running_ci_locally.md | 8 +- dev/breeze/doc/images/output-commands.svg | 4 +- .../doc/images/output_ci-image_build.svg | 6 +- .../doc/images/output_ci-image_build.txt | 2 +- .../doc/images/output_ci-image_pull.svg | 64 ++-- .../doc/images/output_ci-image_pull.txt | 2 +- .../doc/images/output_ci-image_verify.svg | 6 +- .../doc/images/output_ci-image_verify.txt | 2 +- .../doc/images/output_k8s_build-k8s-image.svg | 6 +- .../doc/images/output_k8s_build-k8s-image.txt | 2 +- .../images/output_k8s_configure-cluster.svg | 60 ++-- .../images/output_k8s_configure-cluster.txt | 2 +- .../doc/images/output_k8s_create-cluster.svg | 6 +- .../doc/images/output_k8s_create-cluster.txt | 2 +- .../doc/images/output_k8s_delete-cluster.svg | 34 +- .../doc/images/output_k8s_delete-cluster.txt | 2 +- .../doc/images/output_k8s_deploy-airflow.svg | 6 +- .../doc/images/output_k8s_deploy-airflow.txt | 2 +- dev/breeze/doc/images/output_k8s_k9s.svg | 32 +- dev/breeze/doc/images/output_k8s_k9s.txt | 2 +- dev/breeze/doc/images/output_k8s_logs.svg | 34 +- dev/breeze/doc/images/output_k8s_logs.txt | 2 +- .../images/output_k8s_run-complete-tests.svg | 70 ++-- .../images/output_k8s_run-complete-tests.txt | 2 +- dev/breeze/doc/images/output_k8s_shell.svg | 42 ++- dev/breeze/doc/images/output_k8s_shell.txt | 2 +- dev/breeze/doc/images/output_k8s_status.svg | 4 +- dev/breeze/doc/images/output_k8s_status.txt | 2 +- dev/breeze/doc/images/output_k8s_tests.svg | 70 ++-- dev/breeze/doc/images/output_k8s_tests.txt | 2 +- .../images/output_k8s_upload-k8s-image.svg | 58 ++-- .../images/output_k8s_upload-k8s-image.txt | 2 +- .../doc/images/output_prod-image_build.svg | 6 +- .../doc/images/output_prod-image_build.txt | 2 +- .../doc/images/output_prod-image_pull.svg | 64 ++-- .../doc/images/output_prod-image_pull.txt | 2 +- .../doc/images/output_prod-image_verify.svg | 6 +- .../doc/images/output_prod-image_verify.txt | 2 +- ...elease-management_generate-constraints.svg | 6 +- ...elease-management_generate-constraints.txt | 2 +- ...e-management_install-provider-packages.svg | 126 ++++--- ...e-management_install-provider-packages.txt | 2 +- ...release-management_release-prod-images.svg | 2 +- ...release-management_release-prod-images.txt | 2 +- ...se-management_verify-provider-packages.svg | 132 ++++---- ...se-management_verify-provider-packages.txt | 2 +- ...put_sbom_export-dependency-information.svg | 4 +- ...put_sbom_export-dependency-information.txt | 2 +- dev/breeze/doc/images/output_setup_config.svg | 4 +- dev/breeze/doc/images/output_setup_config.txt | 2 +- dev/breeze/doc/images/output_shell.svg | 4 +- dev/breeze/doc/images/output_shell.txt | 2 +- .../doc/images/output_start-airflow.svg | 4 +- .../doc/images/output_start-airflow.txt | 2 +- .../doc/images/output_static-checks.svg | 26 +- .../doc/images/output_static-checks.txt | 2 +- .../doc/images/output_testing_db-tests.svg | 4 +- .../doc/images/output_testing_db-tests.txt | 2 +- .../output_testing_docker-compose-tests.svg | 4 +- .../output_testing_docker-compose-tests.txt | 2 +- .../output_testing_integration-tests.svg | 4 +- .../output_testing_integration-tests.txt | 2 +- .../images/output_testing_non-db-tests.svg | 4 +- 
.../images/output_testing_non-db-tests.txt | 2 +- .../doc/images/output_testing_tests.svg | 4 +- .../doc/images/output_testing_tests.txt | 2 +- dev/breeze/pyproject.toml | 3 +- .../airflow_breeze/commands/ci_commands.py | 3 +- .../commands/developer_commands.py | 2 +- .../commands/minor_release_command.py | 2 +- .../commands/release_candidate_command.py | 2 +- .../commands/release_management_commands.py | 3 +- .../src/airflow_breeze/global_constants.py | 28 +- .../params/common_build_params.py | 2 +- .../src/airflow_breeze/pre_commit_ids.py | 1 - .../templates/pyproject_TEMPLATE.toml.jinja2 | 2 +- .../src/airflow_breeze/utils/backtracking.py | 2 +- .../src/airflow_breeze/utils/black_utils.py | 4 +- dev/breeze/src/airflow_breeze/utils/cdxgen.py | 3 +- .../src/airflow_breeze/utils/coertions.py | 2 +- .../src/airflow_breeze/utils/console.py | 6 +- .../utils/custom_param_types.py | 3 +- .../src/airflow_breeze/utils/packages.py | 5 +- .../src/airflow_breeze/utils/parallel.py | 3 +- .../src/airflow_breeze/utils/path_utils.py | 4 +- .../src/airflow_breeze/utils/run_utils.py | 7 +- .../airflow_breeze/utils/selective_checks.py | 8 +- .../airflow_breeze/utils/virtualenv_utils.py | 2 +- dev/breeze/tests/test_cache.py | 4 +- dev/breeze/tests/test_packages.py | 4 +- dev/breeze/tests/test_run_test_args.py | 6 +- dev/breeze/tests/test_selective_checks.py | 315 +++++++++--------- dev/breeze/tests/test_shell_params.py | 8 +- dev/check_files.py | 2 +- dev/refresh_images.sh | 4 +- dev/retag_docker_images.py | 2 +- docker_tests/constants.py | 2 +- docker_tests/docker_utils.py | 4 +- .../executors/general.rst | 6 +- .../installing-from-pypi.rst | 2 +- .../modules_management.rst | 26 +- docs/apache-airflow/extra-packages-ref.rst | 2 +- .../installation/installing-from-pypi.rst | 24 +- .../installation/prerequisites.rst | 2 +- .../installation/supported-versions.rst | 4 +- docs/apache-airflow/start.rst | 4 +- docs/docker-stack/README.md | 8 +- docs/docker-stack/build-arg-ref.rst | 2 +- docs/docker-stack/build.rst | 20 +- .../customizing/add-build-essential-custom.sh | 2 +- .../customizing/custom-sources.sh | 2 +- .../github-different-repository.sh | 2 +- .../customizing/github-main.sh | 2 +- .../customizing/github-v2-2-test.sh | 2 +- .../customizing/pypi-dev-runtime-deps.sh | 2 +- .../customizing/pypi-extras-and-deps.sh | 2 +- .../customizing/pypi-selected-version.sh | 2 +- .../restricted/restricted_environments.sh | 10 +- docs/docker-stack/entrypoint.rst | 14 +- docs/docker-stack/index.rst | 8 +- generated/PYPI_README.md | 6 +- generated/provider_dependencies.json | 1 - hatch_build.py | 2 +- .../test_kubernetes_pod_operator.py | 2 +- newsfragments/42739.significant.rst | 1 + pyproject.toml | 13 +- scripts/ci/docker-compose/devcontainer.env | 4 +- scripts/ci/docker-compose/devcontainer.yml | 2 +- scripts/ci/kubernetes/k8s_requirements.txt | 2 +- .../ci/pre_commit/common_precommit_utils.py | 6 +- .../ci/pre_commit/compat_cache_on_methods.py | 69 ---- scripts/ci/pre_commit/mypy.py | 2 +- scripts/ci/pre_commit/mypy_folder.py | 2 +- scripts/ci/pre_commit/sync_init_decorator.py | 11 +- .../pre_commit/update_build_dependencies.py | 4 +- scripts/docker/entrypoint_ci.sh | 2 +- .../install_airflow_and_providers.py | 2 +- .../in_container/run_generate_constraints.py | 4 +- .../providers/cloudant/hooks/test_cloudant.py | 2 +- .../google/cloud/operators/test_mlengine.py | 2 +- .../google/cloud/triggers/test_mlengine.py | 2 +- .../example_kubernetes_decorator.py | 4 +- tests/www/views/test_views.py | 4 +- 193 files changed, 
998 insertions(+), 1215 deletions(-) delete mode 100644 airflow/compat/functools.py create mode 100644 newsfragments/42739.significant.rst delete mode 100755 scripts/ci/pre_commit/compat_cache_on_methods.py diff --git a/.github/actions/breeze/action.yml b/.github/actions/breeze/action.yml index 164914c3d525b..69ebcc7c66e6e 100644 --- a/.github/actions/breeze/action.yml +++ b/.github/actions/breeze/action.yml @@ -21,9 +21,6 @@ description: 'Sets up Python and Breeze' inputs: python-version: description: 'Python version to use' - # Version of Python used for reproducibility of the packages built - # Python 3.8 tarfile produces different tarballs than Python 3.9+ tarfile that's why we are forcing - # Python 3.9 for all release preparation commands to make sure that the tarballs are reproducible default: "3.9" outputs: host-python-version: diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index abf966faede02..6c6d55d75045e 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -16,7 +16,7 @@ # under the License. # --- -name: "Build Images" +name: Build Images run-name: > Build images for ${{ github.event.pull_request.title }} ${{ github.event.pull_request._links.html.href }} on: # yamllint disable-line rule:truthy @@ -54,7 +54,7 @@ concurrency: jobs: build-info: timeout-minutes: 10 - name: "Build Info" + name: Build Info # At build-info stage we do not yet have outputs so we need to hard-code the runs-on to public runners runs-on: ["ubuntu-22.04"] env: @@ -89,7 +89,7 @@ jobs: }}" if: github.repository == 'apache/airflow' steps: - - name: "Cleanup repo" + - name: Cleanup repo shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: Discover PR merge commit @@ -154,13 +154,13 @@ jobs: # COMPOSITE ACTIONS. WE CAN RUN ANYTHING THAT IS IN THE TARGET BRANCH AND THERE IS NO RISK THAT # CODE WILL BE RUN FROM THE PR. 
#################################################################################################### - - name: "Cleanup docker" + - name: Cleanup docker run: ./scripts/ci/cleanup_docker.sh - - name: "Setup python" + - name: Setup python uses: actions/setup-python@v5 with: - python-version: 3.8 - - name: "Install Breeze" + python-version: "3.9" + - name: Install Breeze uses: ./.github/actions/breeze #################################################################################################### # WE RUN SELECTIVE CHECKS HERE USING THE TARGET COMMIT AND ITS PARENT TO BE ABLE TO COMPARE THEM @@ -212,7 +212,7 @@ jobs: docker-cache: ${{ needs.build-info.outputs.docker-cache }} generate-constraints: - name: "Generate constraints" + name: Generate constraints needs: [build-info, build-ci-images] uses: ./.github/workflows/generate-constraints.yml with: @@ -247,7 +247,7 @@ jobs: push-image: "true" use-uv: "true" image-tag: ${{ needs.build-info.outputs.image-tag }} - platform: "linux/amd64" + platform: linux/amd64 python-versions: ${{ needs.build-info.outputs.python-versions }} default-python-version: ${{ needs.build-info.outputs.default-python-version }} branch: ${{ needs.build-info.outputs.default-branch }} diff --git a/.github/workflows/check-providers.yml b/.github/workflows/check-providers.yml index b394f7927329a..f5ff95b73c2f7 100644 --- a/.github/workflows/check-providers.yml +++ b/.github/workflows/check-providers.yml @@ -108,10 +108,9 @@ jobs: run: > breeze release-management generate-issue-content-providers --only-available-in-dist --disable-progress - - name: > - Remove incompatible Python ${{ matrix.python-version }} provider packages + - name: Remove Python 3.9-incompatible provider packages run: | - echo "Removing Python 3.8-incompatible provider: cloudant" + echo "Removing Python 3.9-incompatible provider: cloudant" rm -vf dist/apache_airflow_providers_cloudant* - name: "Generate source constraints from CI image" shell: bash diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2ad8c6a959cec..e33038432b541 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1066,13 +1066,6 @@ repos: files: \.py$ exclude: ^.*/.*_vendor/ additional_dependencies: ['rich>=12.4.4'] - - id: check-compat-cache-on-methods - name: Check that compat cache do not use on class methods - entry: ./scripts/ci/pre_commit/compat_cache_on_methods.py - language: python - pass_filenames: true - files: ^airflow/.*\.py$ - exclude: ^.*/.*_vendor/ - id: check-code-deprecations name: Check deprecations categories in decorators entry: ./scripts/ci/pre_commit/check_deprecations.py diff --git a/.readthedocs.yml b/.readthedocs.yml index aa16e3a8e3d57..4aa28ee78b036 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -20,7 +20,7 @@ formats: [] sphinx: configuration: docs/rtd-deprecation/conf.py python: - version: "3.8" + version: "3.9" install: - method: pip path: . diff --git a/Dockerfile b/Dockerfile index cfb894ac87d22..4525a717728a8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -47,7 +47,7 @@ ARG AIRFLOW_USER_HOME_DIR=/home/airflow # latest released version here ARG AIRFLOW_VERSION="2.10.2" -ARG PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" +ARG PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" ARG AIRFLOW_PIP_VERSION=24.2 ARG AIRFLOW_UV_VERSION=0.4.17 diff --git a/Dockerfile.ci b/Dockerfile.ci index f7b7bb4172025..304d2a4a2d46d 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -16,7 +16,7 @@ # # WARNING: THIS DOCKERFILE IS NOT INTENDED FOR PRODUCTION USE OR DEPLOYMENT. 
#
-ARG PYTHON_BASE_IMAGE="python:3.8-slim-bookworm"
+ARG PYTHON_BASE_IMAGE="python:3.9-slim-bookworm"

##############################################################################################
# This is the script image where we keep all inlined bash scripts needed in other segments
@@ -810,7 +810,7 @@ chmod 1777 /tmp

AIRFLOW_SOURCES=$(cd "${IN_CONTAINER_DIR}/../.." || exit 1; pwd)

-PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.8}
+PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.9}

export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}}
diff --git a/INSTALL b/INSTALL
index 5ccabe2ff3270..6583d9de44206 100644
--- a/INSTALL
+++ b/INSTALL
@@ -141,9 +141,7 @@ This is what it shows currently:

 ┏━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
 ┃ Name        ┃ Type    ┃ Description                                                   ┃
 ┡━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩
-│ default     │ virtual │ Default environment with Python 3.8 for maximum compatibility │
-├─────────────┼─────────┼───────────────────────────────────────────────────────────────┤
-│ airflow-38  │ virtual │ Environment with Python 3.8. No devel installed.              │
+│ default     │ virtual │ Default environment with Python 3.9 for maximum compatibility │
 ├─────────────┼─────────┼───────────────────────────────────────────────────────────────┤
 │ airflow-39  │ virtual │ Environment with Python 3.9. No devel installed.              │
 ├─────────────┼─────────┼───────────────────────────────────────────────────────────────┤
@@ -154,7 +152,7 @@ This is what it shows currently:
 │ airflow-312 │ virtual │ Environment with Python 3.12. No devel installed              │
 └─────────────┴─────────┴───────────────────────────────────────────────────────────────┘

-The default env (if you have not used one explicitly) is `default` and it is a Python 3.8
+The default env (if you have not used one explicitly) is `default` and it is a Python 3.9
 virtualenv for maximum compatibility with `devel` extra installed - this devel extra contains
 the minimum set of dependencies and tools that should be used during unit testing of core Airflow
 and running all `airflow` CLI commands - without support for providers or databases.
@@ -228,15 +226,15 @@ to avoid "works-for-me" syndrome, where you use different versions of dependenci
 that are used in main CI tests and by other contributors.

 There are different constraint files for different Python versions. For example, this command will install
-all basic devel requirements and requirements of Google provider as last successfully tested for Python 3.8:
+all basic devel requirements and requirements of Google provider as last successfully tested for Python 3.9:

 pip install -e ".[devel,google]" \
-  --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.8.txt"
+  --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.9.txt"

 Using the 'constraints-no-providers' constraint files, you can upgrade Airflow without paying attention
 to the provider's dependencies. This allows you to keep installed provider dependencies and install
 the latest supported ones using pure Airflow core.
pip install -e ".[devel]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.9.txt" Airflow extras ============== diff --git a/README.md b/README.md index 3cd6416e93405..0419ae0456070 100644 --- a/README.md +++ b/README.md @@ -99,7 +99,7 @@ Apache Airflow is tested with: | | Main version (dev) | Stable version (2.10.2) | |------------|----------------------------|----------------------------| -| Python | 3.8, 3.9, 3.10, 3.11, 3.12 | 3.8, 3.9, 3.10, 3.11, 3.12 | +| Python | 3.9, 3.10, 3.11, 3.12 | 3.8, 3.9, 3.10, 3.11, 3.12 | | Platform | AMD64/ARM64(\*) | AMD64/ARM64(\*) | | Kubernetes | 1.28, 1.29, 1.30, 1.31 | 1.27, 1.28, 1.29, 1.30 | | PostgreSQL | 12, 13, 14, 15, 16, 17 | 12, 13, 14, 15, 16 | @@ -178,14 +178,14 @@ them to the appropriate format and workflow that your tool requires. ```bash pip install 'apache-airflow==2.10.2' \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.10.2/constraints-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.10.2/constraints-3.9.txt" ``` 2. Installing with extras (i.e., postgres, google) ```bash pip install 'apache-airflow[postgres,google]==2.10.2' \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.10.2/constraints-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.10.2/constraints-3.9.txt" ``` For information on installing provider packages, check @@ -313,7 +313,7 @@ They are based on the official release schedule of Python and Kubernetes, nicely 1. We drop support for Python and Kubernetes versions when they reach EOL. Except for Kubernetes, a version stays supported by Airflow if two major cloud providers still provide support for it. We drop support for those EOL versions in main right after EOL date, and it is effectively removed when we release - the first new MINOR (Or MAJOR if there is no new MINOR version) of Airflow. For example, for Python 3.8 it + the first new MINOR (Or MAJOR if there is no new MINOR version) of Airflow. For example, for Python 3.9 it means that we will drop support in main right after 27.06.2023, and the first MAJOR or MINOR version of Airflow released after will not have it. diff --git a/airflow/cli/commands/connection_command.py b/airflow/cli/commands/connection_command.py index f68830c490bc8..aace3f9c9aede 100644 --- a/airflow/cli/commands/connection_command.py +++ b/airflow/cli/commands/connection_command.py @@ -21,6 +21,7 @@ import json import os import warnings +from functools import cache from pathlib import Path from typing import Any from urllib.parse import urlsplit, urlunsplit @@ -30,7 +31,6 @@ from airflow.cli.simple_table import AirflowConsole from airflow.cli.utils import is_stdout, print_export_output -from airflow.compat.functools import cache from airflow.configuration import conf from airflow.exceptions import AirflowNotFoundException from airflow.hooks.base import BaseHook diff --git a/airflow/compat/functools.py b/airflow/compat/functools.py deleted file mode 100644 index 7b521f9f9da2e..0000000000000 --- a/airflow/compat/functools.py +++ /dev/null @@ -1,33 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. 
The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -import sys - -if sys.version_info >= (3, 9): - from functools import cache -else: - from functools import lru_cache - - cache = lru_cache(maxsize=None) - -# We need to keep it around, in case it was used in the code of old providers, but since we are -# Python 3.8+ we can directly import the functools one -from functools import cached_property # type: ignore - -__all__ = ["cache", "cached_property"] diff --git a/airflow/configuration.py b/airflow/configuration.py index f50e19268380b..81dc18365392e 100644 --- a/airflow/configuration.py +++ b/airflow/configuration.py @@ -1790,9 +1790,7 @@ def load_providers_configuration(self): ) self._default_values = create_default_config_parser(self.configuration_description) # sensitive_config_values needs to be refreshed here. This is a cached_property, so we can delete - # the cached values, and it will be refreshed on next access. This has been an implementation - # detail in Python 3.8 but as of Python 3.9 it is documented behaviour. - # See https://docs.python.org/3/library/functools.html#functools.cached_property + # the cached values, and it will be refreshed on next access. try: del self.sensitive_config_values except AttributeError: diff --git a/airflow/io/__init__.py b/airflow/io/__init__.py index 9996a77717ae0..49f2711c3c6cd 100644 --- a/airflow/io/__init__.py +++ b/airflow/io/__init__.py @@ -18,6 +18,7 @@ import inspect import logging +from functools import cache from typing import ( TYPE_CHECKING, Callable, @@ -26,7 +27,6 @@ from fsspec.implementations.local import LocalFileSystem -from airflow.compat.functools import cache from airflow.providers_manager import ProvidersManager from airflow.stats import Stats from airflow.utils.module_loading import import_string diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index 31f7b6535a9c2..e75ad83923bf2 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -31,6 +31,7 @@ from contextlib import nullcontext from datetime import timedelta from enum import Enum +from functools import cache from typing import TYPE_CHECKING, Any, Callable, Collection, Generator, Iterable, Mapping, Tuple from urllib.parse import quote @@ -69,7 +70,6 @@ from airflow.api_internal.internal_api_call import InternalApiConfig, internal_api_call from airflow.assets import Asset, AssetAlias from airflow.assets.manager import asset_manager -from airflow.compat.functools import cache from airflow.configuration import conf from airflow.exceptions import ( AirflowException, diff --git a/airflow/operators/python.py b/airflow/operators/python.py index a4788caedf438..b032b45ed3e6e 100644 --- a/airflow/operators/python.py +++ b/airflow/operators/python.py @@ -30,13 +30,13 @@ import warnings from abc import ABCMeta, abstractmethod from collections.abc import Container +from functools import cache from pathlib import Path from tempfile import TemporaryDirectory from typing 
import TYPE_CHECKING, Any, Callable, Collection, Iterable, Mapping, NamedTuple, Sequence

 import lazy_object_proxy

-from airflow.compat.functools import cache
 from airflow.exceptions import (
     AirflowConfigException,
     AirflowException,
diff --git a/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst b/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst
index 48980f2153cd0..3d3e95c28b172 100644
--- a/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst
+++ b/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst
@@ -454,7 +454,7 @@ If you have pre-commit installed, pre-commit will be run automatically on commit
 manually after commit, you can run it via ``breeze static-checks --last-commit`` some of the tests might fail
 because suspension of the provider might cause changes in the dependencies, so if you see errors about
 missing dependency imports, non-usable classes etc., you will need to build the CI image locally
-via ``breeze build-image --python 3.8 --upgrade-to-newer-dependencies`` after the first pre-commit run
+via ``breeze build-image --python 3.9 --upgrade-to-newer-dependencies`` after the first pre-commit run
 and then run the static checks again.

 If you want to be absolutely sure to run all static checks you can always do this via
diff --git a/airflow/providers/amazon/aws/transfers/sql_to_s3.py b/airflow/providers/amazon/aws/transfers/sql_to_s3.py
index 65e40797a59b1..19bc7f016b186 100644
--- a/airflow/providers/amazon/aws/transfers/sql_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/sql_to_s3.py
@@ -223,12 +223,9 @@ def _partition_dataframe(self, df: pd.DataFrame) -> Iterable[tuple[str, pd.DataF
         for group_label in (grouped_df := df.groupby(**self.groupby_kwargs)).groups:
             yield (
                 cast(str, group_label),
-                cast(
-                    "pd.DataFrame",
-                    grouped_df.get_group(group_label)
-                    .drop(random_column_name, axis=1, errors="ignore")
-                    .reset_index(drop=True),
-                ),
+                grouped_df.get_group(group_label)
+                .drop(random_column_name, axis=1, errors="ignore")
+                .reset_index(drop=True),
             )

     def _get_hook(self) -> DbApiHook:
diff --git a/airflow/providers/amazon/aws/utils/mixins.py b/airflow/providers/amazon/aws/utils/mixins.py
index 9dbbde914874c..2fb80bcb1517c 100644
--- a/airflow/providers/amazon/aws/utils/mixins.py
+++ b/airflow/providers/amazon/aws/utils/mixins.py
@@ -27,12 +27,11 @@

 from __future__ import annotations

-from functools import cached_property
+from functools import cache, cached_property
 from typing import Any, Generic, NamedTuple, TypeVar

 from typing_extensions import final

-from airflow.compat.functools import cache
 from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook

 AwsHookType = TypeVar("AwsHookType", bound=AwsGenericHook)
diff --git a/airflow/providers/cloudant/provider.yaml b/airflow/providers/cloudant/provider.yaml
index 09857936b877b..a2de5b4335468 100644
--- a/airflow/providers/cloudant/provider.yaml
+++ b/airflow/providers/cloudant/provider.yaml
@@ -51,10 +51,9 @@ dependencies:

 excluded-python-versions:
   # ibmcloudant transitively brings in urllib3 2.x, but the snowflake provider has a dependency that pins
-  # urllib3 to 1.x on Python 3.8 and 3.9; thus we exclude those Python versions from taking the update
+  # urllib3 to 1.x on Python 3.9; thus we exclude that Python version from taking the update
   # to ibmcloudant.
# See #21004, #41555, and https://github.com/snowflakedb/snowflake-connector-python/issues/2016 - - "3.8" - "3.9" integrations: diff --git a/airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py b/airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py index d337caf14c113..c603f8a178b37 100644 --- a/airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py +++ b/airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py @@ -19,6 +19,7 @@ import logging import secrets import string +from functools import cache from typing import TYPE_CHECKING import pendulum @@ -26,7 +27,6 @@ from kubernetes.client.rest import ApiException from slugify import slugify -from airflow.compat.functools import cache from airflow.configuration import conf from airflow.exceptions import AirflowProviderDeprecationWarning diff --git a/airflow/providers/common/io/xcom/backend.py b/airflow/providers/common/io/xcom/backend.py index af55baa4c0628..256b503181e0e 100644 --- a/airflow/providers/common/io/xcom/backend.py +++ b/airflow/providers/common/io/xcom/backend.py @@ -19,12 +19,13 @@ import contextlib import json import uuid +from functools import cache +from pathlib import Path from typing import TYPE_CHECKING, Any, TypeVar from urllib.parse import urlsplit import fsspec.utils -from airflow.compat.functools import cache from airflow.configuration import conf from airflow.io.path import ObjectStoragePath from airflow.models.xcom import BaseXCom @@ -40,22 +41,6 @@ SECTION = "common.io" -def _is_relative_to(o: ObjectStoragePath, other: ObjectStoragePath) -> bool: - """ - Return whether or not this path is relative to the other path. - - This is a port of the pathlib.Path.is_relative_to method. It is not available in python 3.8. - """ - if hasattr(o, "is_relative_to"): - return o.is_relative_to(other) - - try: - o.relative_to(other) - return True - except ValueError: - return False - - def _get_compression_suffix(compression: str) -> str: """ Return the compression suffix for the given compression. 
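The deletion of ``_is_relative_to`` above is safe because the helper only back-filled a Python 3.8 gap: ``is_relative_to`` is part of pathlib's ``PurePath`` from Python 3.9 onwards, the new minimum. A standalone sketch of the equivalence using plain ``pathlib`` (Airflow's ``ObjectStoragePath`` is not needed for the illustration):

# Sketch of the stdlib behaviour the deleted helper emulated:
# PurePath.is_relative_to() ships with Python 3.9+.
from pathlib import PurePosixPath

base = PurePosixPath("/bucket/xcom")
inside = PurePosixPath("/bucket/xcom/dag_id/run_id/key")
outside = PurePosixPath("/bucket/elsewhere")

assert inside.is_relative_to(base)        # stdlib since Python 3.9
assert not outside.is_relative_to(base)   # no try/relative_to/except fallback needed

The ``hasattr``/``try`` dance in the removed code existed only because Python 3.8 lacked the method.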
@@ -111,7 +96,7 @@ def _get_full_path(data: str) -> ObjectStoragePath: raise TypeError(f"Not a valid url: {data}") from None if url.scheme: - if not _is_relative_to(ObjectStoragePath(data), p): + if not Path.is_relative_to(ObjectStoragePath(data), p): raise ValueError(f"Invalid key: {data}") return p / data.replace(str(p), "", 1).lstrip("/") diff --git a/airflow/providers/openlineage/conf.py b/airflow/providers/openlineage/conf.py index b0c763b280a46..9c0253bbab408 100644 --- a/airflow/providers/openlineage/conf.py +++ b/airflow/providers/openlineage/conf.py @@ -35,7 +35,7 @@ def decorator(func): cache = decorator else: - from airflow.compat.functools import cache + from functools import cache from airflow.configuration import conf _CONFIG_SECTION = "openlineage" diff --git a/airflow/serialization/serialized_objects.py b/airflow/serialization/serialized_objects.py index a4801b767acc5..9f180c2a5deac 100644 --- a/airflow/serialization/serialized_objects.py +++ b/airflow/serialization/serialized_objects.py @@ -24,6 +24,7 @@ import inspect import logging import weakref +from functools import cache from inspect import signature from textwrap import dedent from typing import TYPE_CHECKING, Any, Collection, Iterable, Mapping, NamedTuple, Union, cast @@ -43,7 +44,6 @@ _AssetAliasCondition, ) from airflow.callbacks.callback_requests import DagCallbackRequest, TaskCallbackRequest -from airflow.compat.functools import cache from airflow.exceptions import AirflowException, SerializationError, TaskDeferred from airflow.jobs.job import Job from airflow.models import Trigger diff --git a/airflow/serialization/serializers/timezone.py b/airflow/serialization/serializers/timezone.py index a1f40e67c6972..3d2a29ea08bc4 100644 --- a/airflow/serialization/serializers/timezone.py +++ b/airflow/serialization/serializers/timezone.py @@ -18,7 +18,6 @@ from __future__ import annotations import datetime -import sys from typing import TYPE_CHECKING, Any, cast from airflow.utils.module_loading import qualname @@ -30,15 +29,9 @@ serializers = [ "pendulum.tz.timezone.FixedTimezone", "pendulum.tz.timezone.Timezone", + "zoneinfo.ZoneInfo", ] -PY39 = sys.version_info >= (3, 9) - -if PY39: - serializers.append("zoneinfo.ZoneInfo") -else: - serializers.append("backports.zoneinfo.ZoneInfo") - deserializers = serializers __version__ = 1 @@ -83,11 +76,8 @@ def deserialize(classname: str, version: int, data: object) -> Any: if version > __version__: raise TypeError(f"serialized {version} of {classname} > {__version__}") - if "zoneinfo.ZoneInfo" in classname: - try: - from zoneinfo import ZoneInfo - except ImportError: - from backports.zoneinfo import ZoneInfo + if classname == "backports.zoneinfo.ZoneInfo" and isinstance(data, str): + from zoneinfo import ZoneInfo return ZoneInfo(data) diff --git a/airflow/utils/log/secrets_masker.py b/airflow/utils/log/secrets_masker.py index 13c93d992fffa..4f9604aced7f4 100644 --- a/airflow/utils/log/secrets_masker.py +++ b/airflow/utils/log/secrets_masker.py @@ -22,7 +22,7 @@ import logging import sys from enum import Enum -from functools import cached_property +from functools import cache, cached_property from typing import ( TYPE_CHECKING, Any, @@ -42,7 +42,6 @@ import re2 from airflow import settings -from airflow.compat.functools import cache if TYPE_CHECKING: from kubernetes.client import V1EnvVar diff --git a/airflow/utils/platform.py b/airflow/utils/platform.py index 7945e2b945382..74f56a0ab037d 100644 --- a/airflow/utils/platform.py +++ b/airflow/utils/platform.py @@ -24,8 +24,7 @@ 
import pkgutil import platform import sys - -from airflow.compat.functools import cache +from functools import cache IS_WINDOWS = platform.system() == "Windows" diff --git a/airflow/www/forms.py b/airflow/www/forms.py index 5b746ab633c06..a3b1d5262db20 100644 --- a/airflow/www/forms.py +++ b/airflow/www/forms.py @@ -20,6 +20,7 @@ import datetime import json import operator +from functools import cache from typing import Iterator import pendulum @@ -36,7 +37,6 @@ from wtforms.fields import Field, IntegerField, PasswordField, SelectField, StringField, TextAreaField from wtforms.validators import InputRequired, Optional -from airflow.compat.functools import cache from airflow.configuration import conf from airflow.providers_manager import ProvidersManager from airflow.utils.types import DagRunType diff --git a/airflow/www/views.py b/airflow/www/views.py index 47c548d5e7667..7782da955c92e 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -32,7 +32,7 @@ import warnings from bisect import insort_left from collections import defaultdict -from functools import cached_property +from functools import cache, cached_property from json import JSONDecodeError from pathlib import Path from typing import TYPE_CHECKING, Any, Collection, Iterator, Mapping, MutableMapping, Sequence @@ -89,7 +89,6 @@ ) from airflow.assets import Asset, AssetAlias from airflow.auth.managers.models.resource_details import AccessView, DagAccessEntity, DagDetails -from airflow.compat.functools import cache from airflow.configuration import AIRFLOW_CONFIG, conf from airflow.exceptions import ( AirflowConfigException, @@ -178,7 +177,7 @@ def sanitize_args(args: dict[str, Any]) -> dict[str, Any]: return {key: value for key, value in args.items() if not key.startswith("_")} -# Following the release of https://github.com/python/cpython/issues/102153 in Python 3.8.17 and 3.9.17 on +# Following the release of https://github.com/python/cpython/issues/102153 in Python 3.9.17 on # June 6, 2023, we are adding extra sanitization of the urls passed to get_safe_url method to make it works # the same way regardless if the user uses latest Python patchlevel versions or not. This also follows # a recommended solution by the Python core team. diff --git a/clients/python/README.md b/clients/python/README.md index e1427fce92d46..7bba821e43e62 100644 --- a/clients/python/README.md +++ b/clients/python/README.md @@ -248,7 +248,7 @@ For more information, please visit [https://airflow.apache.org](https://airflow. ## Requirements. 
-Python >=3.8 +Python >=3.9 ## Installation & Usage diff --git a/clients/python/pyproject.toml b/clients/python/pyproject.toml index 1a5ccdc9e2b63..1584744249646 100644 --- a/clients/python/pyproject.toml +++ b/clients/python/pyproject.toml @@ -25,7 +25,7 @@ dynamic = ["version"] description = "Apache Airflow API (Stable)" readme = "README.md" license-files.globs = ["LICENSE", "NOTICE"] -requires-python = "~=3.8" +requires-python = "~=3.9" authors = [ { name = "Apache Software Foundation", email = "dev@airflow.apache.org" }, ] @@ -42,7 +42,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", @@ -74,7 +73,7 @@ run-coverage = "pytest test" run = "run-coverage --no-cov" [[tool.hatch.envs.test.matrix]] -python = ["3.8", "3.9", "3.10", "3.11"] +python = ["3.9", "3.10", "3.11"] [tool.hatch.version] path = "./version.txt" diff --git a/clients/python/test_python_client.py b/clients/python/test_python_client.py index d4d3f98efd834..5d0accdc019ff 100644 --- a/clients/python/test_python_client.py +++ b/clients/python/test_python_client.py @@ -17,7 +17,7 @@ # # PEP 723 compliant inline script metadata (not yet widely supported) # /// script -# requires-python = ">=3.8" +# requires-python = ">=3.9" # dependencies = [ # "apache-airflow-client", # "rich", diff --git a/constraints/README.md b/constraints/README.md index 791450d1bd7c9..9d02755dc5cd6 100644 --- a/constraints/README.md +++ b/constraints/README.md @@ -29,12 +29,12 @@ This allows you to iterate on dependencies without having to run `--upgrade-to-n Typical workflow in this case is: * download and copy the constraint file to the folder (for example via -[The GitHub Raw Link](https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.8.txt) +[The GitHub Raw Link](https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.9.txt) * modify the constraint file in "constraints" folder * build the image using this command ```bash -breeze ci-image build --python 3.8 --airflow-constraints-location constraints/constraints-3.8txt +breeze ci-image build --python 3.9 --airflow-constraints-location constraints/constraints-3.9.txt ``` You can continue iterating and updating the constraint file (and rebuilding the image) @@ -46,7 +46,7 @@ pip freeze | sort | \ grep -v "apache_airflow" | \ grep -v "apache-airflow==" | \ grep -v "@" | \ - grep -v "/opt/airflow" > /opt/airflow/constraints/constraints-3.8.txt + grep -v "/opt/airflow" > /opt/airflow/constraints/constraints-3.9.txt ``` If you are working with others on updating the dependencies, you can also commit the constraint diff --git a/contributing-docs/03_contributors_quick_start.rst b/contributing-docs/03_contributors_quick_start.rst index 8f7ead6deacc4..bddecc6f1e486 100644 --- a/contributing-docs/03_contributors_quick_start.rst +++ b/contributing-docs/03_contributors_quick_start.rst @@ -256,7 +256,7 @@ Setting up Breeze .. code-block:: bash - breeze --python 3.8 --backend postgres + breeze --python 3.9 --backend postgres .. note:: If you encounter an error like "docker.credentials.errors.InitializationError: @@ -313,7 +313,7 @@ Using Breeze ------------ 1. 
Starting breeze environment using ``breeze start-airflow`` starts Breeze environment with last configuration run( - In this case python and backend will be picked up from last execution ``breeze --python 3.8 --backend postgres``) + In this case python and backend will be picked up from last execution ``breeze --python 3.9 --backend postgres``) It also automatically starts webserver, backend and scheduler. It drops you in tmux with scheduler in bottom left and webserver in bottom right. Use ``[Ctrl + B] and Arrow keys`` to navigate. @@ -324,9 +324,9 @@ Using Breeze Use CI image. Branch name: main - Docker image: ghcr.io/apache/airflow/main/ci/python3.8:latest + Docker image: ghcr.io/apache/airflow/main/ci/python3.9:latest Airflow source version: 2.4.0.dev0 - Python version: 3.8 + Python version: 3.9 Backend: mysql 5.7 @@ -365,7 +365,7 @@ Using Breeze .. code-block:: bash - breeze --python 3.8 --backend postgres + breeze --python 3.9 --backend postgres 2. Open tmux @@ -601,34 +601,27 @@ All Tests are inside ./tests directory. root@63528318c8b1:/opt/airflow# pytest tests/utils/test_dates.py ============================================================= test session starts ============================================================== - platform linux -- Python 3.8.16, pytest-7.2.1, pluggy-1.0.0 -- /usr/local/bin/python + platform linux -- Python 3.9.20, pytest-8.3.3, pluggy-1.5.0 -- /usr/local/bin/python cachedir: .pytest_cache - rootdir: /opt/airflow, configfile: pytest.ini - plugins: timeouts-1.2.1, capture-warnings-0.0.4, cov-4.0.0, requests-mock-1.10.0, rerunfailures-11.1.1, anyio-3.6.2, instafail-0.4.2, time-machine-2.9.0, asyncio-0.20.3, httpx-0.21.3, xdist-3.2.0 - asyncio: mode=strict + rootdir: /opt/airflow + configfile: pyproject.toml + plugins: anyio-4.6.0, time-machine-2.15.0, icdiff-0.9, rerunfailures-14.0, instafail-0.5.0, custom-exit-code-0.3.0, xdist-3.6.1, mock-3.14.0, cov-5.0.0, asyncio-0.24.0, requests-mock-1.12.1, timeouts-1.2.1 + asyncio: mode=strict, default_loop_scope=None setup timeout: 0.0s, execution timeout: 0.0s, teardown timeout: 0.0s - collected 12 items - - tests/utils/test_dates.py::TestDates::test_days_ago PASSED [ 8%] - tests/utils/test_dates.py::TestDates::test_parse_execution_date PASSED [ 16%] - tests/utils/test_dates.py::TestDates::test_round_time PASSED [ 25%] - tests/utils/test_dates.py::TestDates::test_infer_time_unit PASSED [ 33%] - tests/utils/test_dates.py::TestDates::test_scale_time_units PASSED [ 41%] - tests/utils/test_dates.py::TestUtilsDatesDateRange::test_no_delta PASSED [ 50%] - tests/utils/test_dates.py::TestUtilsDatesDateRange::test_end_date_before_start_date PASSED [ 58%] - tests/utils/test_dates.py::TestUtilsDatesDateRange::test_both_end_date_and_num_given PASSED [ 66%] - tests/utils/test_dates.py::TestUtilsDatesDateRange::test_invalid_delta PASSED [ 75%] - tests/utils/test_dates.py::TestUtilsDatesDateRange::test_positive_num_given PASSED [ 83%] - tests/utils/test_dates.py::TestUtilsDatesDateRange::test_negative_num_given PASSED [ 91%] - tests/utils/test_dates.py::TestUtilsDatesDateRange::test_delta_cron_presets PASSED [100%] - - ============================================================== 12 passed in 0.24s ============================================================== + collected 4 items + + tests/utils/test_dates.py::TestDates::test_parse_execution_date PASSED [ 25%] + tests/utils/test_dates.py::TestDates::test_round_time PASSED [ 50%] + tests/utils/test_dates.py::TestDates::test_infer_time_unit PASSED [ 75%] + 
tests/utils/test_dates.py::TestDates::test_scale_time_units PASSED [100%] + + ================================================================== 4 passed in 3.30s =================================================================== - Running All the test with Breeze by specifying required python version, backend, backend version .. code-block:: bash - breeze --backend postgres --postgres-version 15 --python 3.8 --db-reset testing tests --test-type All + breeze --backend postgres --postgres-version 15 --python 3.9 --db-reset testing tests --test-type All - Running specific type of test @@ -638,7 +631,7 @@ All Tests are inside ./tests directory. .. code-block:: bash - breeze --backend postgres --postgres-version 15 --python 3.8 --db-reset testing tests --test-type Core + breeze --backend postgres --postgres-version 15 --python 3.9 --db-reset testing tests --test-type Core - Running Integration test for specific test type @@ -647,7 +640,7 @@ All Tests are inside ./tests directory. .. code-block:: bash - breeze --backend postgres --postgres-version 15 --python 3.8 --db-reset testing tests --test-type All --integration mongo + breeze --backend postgres --postgres-version 15 --python 3.9 --db-reset testing tests --test-type All --integration mongo - For more information on Testing visit : |09_testing.rst| diff --git a/contributing-docs/05_pull_requests.rst b/contributing-docs/05_pull_requests.rst index ea9300f9c643f..1e14167943497 100644 --- a/contributing-docs/05_pull_requests.rst +++ b/contributing-docs/05_pull_requests.rst @@ -92,7 +92,7 @@ these guidelines: you can push your code to PR and see results of the tests in the CI. - You can use any supported python version to run the tests, but the best is to check - if it works for the oldest supported version (Python 3.8 currently). In rare cases + if it works for the oldest supported version (Python 3.9 currently). In rare cases tests might fail with the oldest version when you use features that are available in newer Python versions. For that purpose we have ``airflow.compat`` package where we keep back-ported useful features from newer versions. diff --git a/contributing-docs/07_local_virtualenv.rst b/contributing-docs/07_local_virtualenv.rst index 8439eb2ab2089..2c92edee97a7b 100644 --- a/contributing-docs/07_local_virtualenv.rst +++ b/contributing-docs/07_local_virtualenv.rst @@ -37,7 +37,7 @@ Required Software Packages Use system-level package managers like yum, apt-get for Linux, or Homebrew for macOS to install required software packages: -* Python (One of: 3.8, 3.9, 3.10, 3.11, 3.12) +* Python (One of: 3.9, 3.10, 3.11, 3.12) * MySQL 5.7+ * libxml * helm (only for helm chart tests) @@ -187,9 +187,7 @@ This is what it shows currently: +-------------+---------+---------------------------------------------------------------+ | Name | Type | Description | +=============+=========+===============================================================+ -| default | virtual | Default environment with Python 3.8 for maximum compatibility | -+-------------+---------+---------------------------------------------------------------+ -| airflow-38 | virtual | Environment with Python 3.8. No devel installed. | +| default | virtual | Default environment with Python 3.9 for maximum compatibility | +-------------+---------+---------------------------------------------------------------+ | airflow-39 | virtual | Environment with Python 3.9. No devel installed. 
| +-------------+---------+---------------------------------------------------------------+ @@ -200,7 +198,7 @@ This is what it shows currently: | airflow-312 | virtual | Environment with Python 3.12. No devel installed | +-------------+---------+---------------------------------------------------------------+ -The default env (if you have not used one explicitly) is ``default`` and it is a Python 3.8 +The default env (if you have not used one explicitly) is ``default`` and it is a Python 3.9 virtualenv for maximum compatibility. You can install devel set of dependencies with it by running: @@ -381,12 +379,12 @@ to avoid "works-for-me" syndrome, where you use different version of dependencie that are used in main, CI tests and by other contributors. There are different constraint files for different python versions. For example this command will install -all basic devel requirements and requirements of google provider as last successfully tested for Python 3.8: +all basic devel requirements and requirements of google provider as last successfully tested for Python 3.9: .. code:: bash pip install -e ".[devel,google]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.9.txt" Make sure to use latest main for such installation, those constraints are "development constraints" and they are refreshed several times a day to make sure they are up to date with the latest changes in the main branch. @@ -403,7 +401,7 @@ and install to latest supported ones by pure airflow core. .. code:: bash pip install -e ".[devel]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.9.txt" These are examples of the development options available with the local virtualenv in your IDE: diff --git a/contributing-docs/08_static_code_checks.rst b/contributing-docs/08_static_code_checks.rst index d50b9db3e607f..422a9f027e1ed 100644 --- a/contributing-docs/08_static_code_checks.rst +++ b/contributing-docs/08_static_code_checks.rst @@ -40,7 +40,7 @@ use. So, you can be sure your modifications will also work for CI if they pass pre-commit hooks. We have integrated the fantastic `pre-commit `__ framework -in our development workflow. To install and use it, you need at least Python 3.8 locally. +in our development workflow. To install and use it, you need at least Python 3.9 locally. Installing pre-commit hooks --------------------------- @@ -152,8 +152,6 @@ require Breeze Docker image to be built locally. 
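As a quick, hedged sketch of the pre-commit workflow described above (assuming ``pipx`` is available, as these docs recommend elsewhere; the check name is taken from the table below):

.. code-block:: bash

    # Install the pre-commit framework and register the repository hooks
    pipx install pre-commit
    pre-commit install

    # Run a single check from the table below against all files
    pre-commit run check-core-deprecation-classes --all-files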
+-----------------------------------------------------------+--------------------------------------------------------+---------+ | check-common-compat-used-for-openlineage | Check common.compat is used for OL deprecated classes | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ -| check-compat-cache-on-methods | Check that compat cache do not use on class methods | | -+-----------------------------------------------------------+--------------------------------------------------------+---------+ | check-core-deprecation-classes | Verify usage of Airflow deprecation classes in core | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ | check-daysago-import-from-utils | days_ago imported from airflow.utils.dates | | diff --git a/contributing-docs/12_airflow_dependencies_and_extras.rst b/contributing-docs/12_airflow_dependencies_and_extras.rst index 16d2f32ee172d..8bfbdb630c9f7 100644 --- a/contributing-docs/12_airflow_dependencies_and_extras.rst +++ b/contributing-docs/12_airflow_dependencies_and_extras.rst @@ -86,7 +86,7 @@ from the PyPI package: .. code-block:: bash pip install "apache-airflow[google,amazon,async]==2.2.5" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.2.5/constraints-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.2.5/constraints-3.9.txt" The last one can be used to install Airflow in "minimal" mode - i.e when bare Airflow is installed without extras. @@ -98,7 +98,7 @@ requirements). .. code-block:: bash pip install -e ".[devel]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.9.txt" This also works with extras - for example: @@ -106,7 +106,7 @@ This also works with extras - for example: .. code-block:: bash pip install ".[ssh]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.9.txt" There are different set of fixed constraint files for different python major/minor versions and you should @@ -118,7 +118,7 @@ using ``constraints-no-providers`` constraint files as well. .. code-block:: bash pip install . --upgrade \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.9.txt" The ``constraints-.txt`` and ``constraints-no-providers-.txt`` diff --git a/contributing-docs/testing/docker_compose_tests.rst b/contributing-docs/testing/docker_compose_tests.rst index 94864b4137de8..921a3cafb193b 100644 --- a/contributing-docs/testing/docker_compose_tests.rst +++ b/contributing-docs/testing/docker_compose_tests.rst @@ -48,7 +48,7 @@ Running complete test with breeze: .. 
code-block:: bash - breeze prod-image build --python 3.8 + breeze prod-image build --python 3.9 breeze testing docker-compose-tests In case the test fails, it will dump the logs from the running containers to the console and it @@ -65,8 +65,8 @@ to see the output of the test as it happens (it can be also set via The test can be also run manually with ``pytest docker_tests/test_docker_compose_quick_start.py`` command, provided that you have a local airflow venv with ``dev`` extra set and the ``DOCKER_IMAGE`` environment variable is set to the image you want to test. The variable defaults -to ``ghcr.io/apache/airflow/main/prod/python3.8:latest`` which is built by default -when you run ``breeze prod-image build --python 3.8``. also the switches ``--skip-docker-compose-deletion`` +to ``ghcr.io/apache/airflow/main/prod/python3.9:latest`` which is built by default +when you run ``breeze prod-image build --python 3.9``. also the switches ``--skip-docker-compose-deletion`` and ``--wait-for-containers-timeout`` can only be passed via environment variables. If you want to debug the deployment using ``docker compose`` commands after ``SKIP_DOCKER_COMPOSE_DELETION`` @@ -87,7 +87,7 @@ the prod image build command above. .. code-block:: bash - export AIRFLOW_IMAGE_NAME=ghcr.io/apache/airflow/main/prod/python3.8:latest + export AIRFLOW_IMAGE_NAME=ghcr.io/apache/airflow/main/prod/python3.9:latest and follow the instructions in the `Running Airflow in Docker `_ diff --git a/contributing-docs/testing/k8s_tests.rst b/contributing-docs/testing/k8s_tests.rst index a4a6f67da0e2c..a9ba3151fe9bb 100644 --- a/contributing-docs/testing/k8s_tests.rst +++ b/contributing-docs/testing/k8s_tests.rst @@ -47,7 +47,7 @@ per each combination of Python and Kubernetes version. This is used during CI wh tests against those different clusters - even in parallel. The cluster name follows the pattern ``airflow-python-X.Y-vA.B.C`` where X.Y is a major/minor Python version -and A.B.C is Kubernetes version. Example cluster name: ``airflow-python-3.8-v1.24.0`` +and A.B.C is Kubernetes version. Example cluster name: ``airflow-python-3.9-v1.24.0`` Most of the commands can be executed in parallel for multiple images/clusters by adding ``--run-in-parallel`` to create clusters or deploy airflow. Similarly checking for status, dumping logs and deleting clusters @@ -215,7 +215,7 @@ Should result in KinD creating the K8S cluster. .. code-block:: text - Config created in /Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.8-v1.24.2/.kindconfig.yaml: + Config created in /Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.9-v1.24.2/.kindconfig.yaml: # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file @@ -251,7 +251,7 @@ Should result in KinD creating the K8S cluster. - Creating cluster "airflow-python-3.8-v1.24.2" ... + Creating cluster "airflow-python-3.9-v1.24.2" ... ✓ Ensuring node image (kindest/node:v1.24.2) 🖼 ✓ Preparing nodes 📦 📦 ✓ Writing configuration 📜 @@ -259,10 +259,10 @@ Should result in KinD creating the K8S cluster. ✓ Installing CNI 🔌 ✓ Installing StorageClass 💾 ✓ Joining worker nodes 🚜 - Set kubectl context to "kind-airflow-python-3.8-v1.24.2" + Set kubectl context to "kind-airflow-python-3.9-v1.24.2" You can now use your cluster with: - kubectl cluster-info --context kind-airflow-python-3.8-v1.24.2 + kubectl cluster-info --context kind-airflow-python-3.9-v1.24.2 Not sure what to do next? 
😅 Check out https://kind.sigs.k8s.io/docs/user/quick-start/ @@ -270,9 +270,9 @@ Should result in KinD creating the K8S cluster. Connecting to localhost:18150. Num try: 1 Error when connecting to localhost:18150 : ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response')) - Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.8 --kubernetes-version v1.24.2` to (re)deploy airflow + Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.9 --kubernetes-version v1.24.2` to (re)deploy airflow - KinD cluster airflow-python-3.8-v1.24.2 created! + KinD cluster airflow-python-3.9-v1.24.2 created! NEXT STEP: You might now configure your cluster by: @@ -286,20 +286,20 @@ Should result in KinD creating the K8S cluster. .. code-block:: text - Configuring airflow-python-3.8-v1.24.2 to be ready for Airflow deployment - Deleting K8S namespaces for kind-airflow-python-3.8-v1.24.2 + Configuring airflow-python-3.9-v1.24.2 to be ready for Airflow deployment + Deleting K8S namespaces for kind-airflow-python-3.9-v1.24.2 Error from server (NotFound): namespaces "airflow" not found Error from server (NotFound): namespaces "test-namespace" not found Creating namespaces namespace/airflow created namespace/test-namespace created - Created K8S namespaces for cluster kind-airflow-python-3.8-v1.24.2 + Created K8S namespaces for cluster kind-airflow-python-3.9-v1.24.2 - Deploying test resources for cluster kind-airflow-python-3.8-v1.24.2 + Deploying test resources for cluster kind-airflow-python-3.9-v1.24.2 persistentvolume/test-volume created persistentvolumeclaim/test-volume created service/airflow-webserver-node-port created - Deployed test resources for cluster kind-airflow-python-3.8-v1.24.2 + Deployed test resources for cluster kind-airflow-python-3.9-v1.24.2 NEXT STEP: You might now build your k8s image by: @@ -317,45 +317,45 @@ Should show the status of current KinD cluster. .. code-block:: text ======================================================================================================================== - Cluster: airflow-python-3.8-v1.24.2 + Cluster: airflow-python-3.9-v1.24.2 - * KUBECONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.8-v1.24.2/.kubeconfig - * KINDCONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.8-v1.24.2/.kindconfig.yaml + * KUBECONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.9-v1.24.2/.kubeconfig + * KINDCONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.9-v1.24.2/.kindconfig.yaml - Cluster info: airflow-python-3.8-v1.24.2 + Cluster info: airflow-python-3.9-v1.24.2 Kubernetes control plane is running at https://127.0.0.1:48366 CoreDNS is running at https://127.0.0.1:48366/api/v1/namespaces/kube-system/services/kube-dns:dns/proxy To further debug and diagnose cluster problems, use 'kubectl cluster-info dump'. 
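A hedged aside on the hint above: ``kubectl cluster-info dump`` can be pointed at the Breeze-generated kubeconfig and context shown in this output (the path and context name below are taken from the example output and will differ per cluster):

.. code-block:: bash

    # Use the kubeconfig that Breeze created for this KinD cluster
    export KUBECONFIG=.build/.k8s-clusters/airflow-python-3.9-v1.24.2/.kubeconfig
    kubectl cluster-info dump --context kind-airflow-python-3.9-v1.24.2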
- Storage class for airflow-python-3.8-v1.24.2 + Storage class for airflow-python-3.9-v1.24.2 NAME PROVISIONER RECLAIMPOLICY VOLUMEBINDINGMODE ALLOWVOLUMEEXPANSION AGE standard (default) rancher.io/local-path Delete WaitForFirstConsumer false 83s - Running pods for airflow-python-3.8-v1.24.2 + Running pods for airflow-python-3.9-v1.24.2 NAME READY STATUS RESTARTS AGE coredns-6d4b75cb6d-rwp9d 1/1 Running 0 71s coredns-6d4b75cb6d-vqnrc 1/1 Running 0 71s - etcd-airflow-python-3.8-v1.24.2-control-plane 1/1 Running 0 84s + etcd-airflow-python-3.9-v1.24.2-control-plane 1/1 Running 0 84s kindnet-ckc8l 1/1 Running 0 69s kindnet-qqt8k 1/1 Running 0 71s - kube-apiserver-airflow-python-3.8-v1.24.2-control-plane 1/1 Running 0 84s - kube-controller-manager-airflow-python-3.8-v1.24.2-control-plane 1/1 Running 0 84s + kube-apiserver-airflow-python-3.9-v1.24.2-control-plane 1/1 Running 0 84s + kube-controller-manager-airflow-python-3.9-v1.24.2-control-plane 1/1 Running 0 84s kube-proxy-6g7hn 1/1 Running 0 69s kube-proxy-dwfvp 1/1 Running 0 71s - kube-scheduler-airflow-python-3.8-v1.24.2-control-plane 1/1 Running 0 84s + kube-scheduler-airflow-python-3.9-v1.24.2-control-plane 1/1 Running 0 84s KinD Cluster API server URL: http://localhost:48366 Connecting to localhost:18150. Num try: 1 Error when connecting to localhost:18150 : ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response')) - Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.8 --kubernetes-version v1.24.2` to (re)deploy airflow + Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.9 --kubernetes-version v1.24.2` to (re)deploy airflow - Cluster healthy: airflow-python-3.8-v1.24.2 + Cluster healthy: airflow-python-3.9-v1.24.2 5. Build the image base on PROD Airflow image. You need to build the PROD image first (the command will guide you if you did not) either by running the build separately or passing ``--rebuild-base-image`` @@ -373,15 +373,15 @@ Should show the status of current KinD cluster. .. code-block:: text - Building the K8S image for Python 3.8 using airflow base image: ghcr.io/apache/airflow/main/prod/python3.8:latest + Building the K8S image for Python 3.9 using airflow base image: ghcr.io/apache/airflow/main/prod/python3.9:latest [+] Building 0.1s (8/8) FINISHED => [internal] load build definition from Dockerfile 0.0s => => transferring dockerfile: 301B 0.0s => [internal] load .dockerignore 0.0s => => transferring context: 35B 0.0s - => [internal] load metadata for ghcr.io/apache/airflow/main/prod/python3.8:latest 0.0s - => [1/3] FROM ghcr.io/apache/airflow/main/prod/python3.8:latest 0.0s + => [internal] load metadata for ghcr.io/apache/airflow/main/prod/python3.9:latest 0.0s + => [1/3] FROM ghcr.io/apache/airflow/main/prod/python3.9:latest 0.0s => [internal] load build context 0.0s => => transferring context: 3.00kB 0.0s => CACHED [2/3] COPY airflow/example_dags/ /opt/airflow/dags/ 0.0s @@ -389,7 +389,7 @@ Should show the status of current KinD cluster. => exporting to image 0.0s => => exporting layers 0.0s => => writing image sha256:c0bdd363c549c3b0731b8e8ce34153d081f239ee2b582355b7b3ffd5394c40bb 0.0s - => => naming to ghcr.io/apache/airflow/main/prod/python3.8-kubernetes:latest + => => naming to ghcr.io/apache/airflow/main/prod/python3.9-kubernetes:latest NEXT STEP: You might now upload your k8s image by: @@ -409,9 +409,9 @@ Should show the status of current KinD cluster. 
Good version of kubectl installed: 1.25.0 in /Users/jarek/IdeaProjects/airflow/.build/.k8s-env/bin Good version of helm installed: 3.9.2 in /Users/jarek/IdeaProjects/airflow/.build/.k8s-env/bin Stable repo is already added - Uploading Airflow image ghcr.io/apache/airflow/main/prod/python3.8-kubernetes to cluster airflow-python-3.8-v1.24.2 - Image: "ghcr.io/apache/airflow/main/prod/python3.8-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.8-v1.24.2-worker", loading... - Image: "ghcr.io/apache/airflow/main/prod/python3.8-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.8-v1.24.2-control-plane", loading... + Uploading Airflow image ghcr.io/apache/airflow/main/prod/python3.9-kubernetes to cluster airflow-python-3.9-v1.24.2 + Image: "ghcr.io/apache/airflow/main/prod/python3.9-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.9-v1.24.2-worker", loading... + Image: "ghcr.io/apache/airflow/main/prod/python3.9-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.9-v1.24.2-control-plane", loading... NEXT STEP: You might now deploy airflow by: @@ -426,8 +426,8 @@ Should show the status of current KinD cluster. .. code-block:: text - Deploying Airflow for cluster airflow-python-3.8-v1.24.2 - Deploying kind-airflow-python-3.8-v1.24.2 with airflow Helm Chart. + Deploying Airflow for cluster airflow-python-3.9-v1.24.2 + Deploying kind-airflow-python-3.9-v1.24.2 with airflow Helm Chart. Copied chart sources to /private/var/folders/v3/gvj4_mw152q556w2rrh7m46w0000gn/T/chart_edu__kir/chart Deploying Airflow from /private/var/folders/v3/gvj4_mw152q556w2rrh7m46w0000gn/T/chart_edu__kir/chart NAME: airflow @@ -469,12 +469,12 @@ Should show the status of current KinD cluster. Information on how to set a static webserver secret key can be found here: https://airflow.apache.org/docs/helm-chart/stable/production-guide.html#webserver-secret-key - Deployed kind-airflow-python-3.8-v1.24.2 with airflow Helm Chart. + Deployed kind-airflow-python-3.9-v1.24.2 with airflow Helm Chart. - Airflow for Python 3.8 and K8S version v1.24.2 has been successfully deployed. + Airflow for Python 3.9 and K8S version v1.24.2 has been successfully deployed. - The KinD cluster name: airflow-python-3.8-v1.24.2 - The kubectl cluster name: kind-airflow-python-3.8-v1.24.2. + The KinD cluster name: airflow-python-3.9-v1.24.2 + The kubectl cluster name: kind-airflow-python-3.9-v1.24.2. KinD Cluster API server URL: http://localhost:48366 @@ -508,7 +508,7 @@ The virtualenv required will be created automatically when the scripts are run. .. code-block:: text - Running tests with kind-airflow-python-3.8-v1.24.2 cluster. + Running tests with kind-airflow-python-3.9-v1.24.2 cluster. Command to run: pytest kubernetes_tests ========================================================================================= test session starts ========================================================================================== platform darwin -- Python 3.9.9, pytest-6.2.5, py-1.11.0, pluggy-1.0.0 -- /Users/jarek/IdeaProjects/airflow/.build/.k8s-env/bin/python @@ -537,7 +537,7 @@ Once you enter the environment, you receive this information: Entering interactive k8s shell. 
- (kind-airflow-python-3.8-v1.24.2:KubernetesExecutor)> + (kind-airflow-python-3.9-v1.24.2:KubernetesExecutor)> In a separate terminal you can open the k9s CLI: @@ -647,9 +647,9 @@ Kind has also useful commands to inspect your running cluster: .. code-block:: text - Deleting KinD cluster airflow-python-3.8-v1.24.2! - Deleting cluster "airflow-python-3.8-v1.24.2" ... - KinD cluster airflow-python-3.8-v1.24.2 deleted! + Deleting KinD cluster airflow-python-3.9-v1.24.2! + Deleting cluster "airflow-python-3.9-v1.24.2" ... + KinD cluster airflow-python-3.9-v1.24.2 deleted! Running complete k8s tests diff --git a/contributing-docs/testing/unit_tests.rst b/contributing-docs/testing/unit_tests.rst index cd0b9ab00bf44..dff3835a02fba 100644 --- a/contributing-docs/testing/unit_tests.rst +++ b/contributing-docs/testing/unit_tests.rst @@ -209,7 +209,7 @@ rerun in Breeze as you will (``-n auto`` will parallelize tests using ``pytest-x .. code-block:: bash - breeze shell --backend none --python 3.8 + breeze shell --backend none --python 3.9 > pytest tests --skip-db-tests -n auto @@ -251,7 +251,7 @@ You can also run DB tests with ``breeze`` dockerized environment. You can choose ``--backend`` flag. The default is ``sqlite`` but you can also use others such as ``postgres`` or ``mysql``. You can also select backend version and Python version to use. You can specify the ``test-type`` to run - breeze will list the test types you can run with ``--help`` and provide auto-complete for them. Example -below runs the ``Core`` tests with ``postgres`` backend and ``3.8`` Python version: +below runs the ``Core`` tests with ``postgres`` backend and ``3.9`` Python version: We have a dedicated, opinionated ``breeze testing db-tests`` command as well that runs DB tests (it is also used in CI to run the DB tests, where you do not have to specify extra flags for @@ -286,7 +286,7 @@ either by package/module/test or by test type - whatever ``pytest`` supports. .. code-block:: bash - breeze shell --backend postgres --python 3.8 + breeze shell --backend postgres --python 3.9 > pytest tests --run-db-tests-only As explained before, you cannot run DB tests in parallel using ``pytest-xdist`` plugin, but ``breeze`` has @@ -296,7 +296,7 @@ you use ``breeze testing db-tests`` command): .. code-block:: bash - breeze testing tests --run-db-tests-only --backend postgres --python 3.8 --run-in-parallel + breeze testing tests --run-db-tests-only --backend postgres --python 3.9 --run-in-parallel Examples of marking test as DB test ................................... @@ -1133,7 +1133,7 @@ directly to the container. .. code-block:: bash - breeze ci-image build --python 3.8 + breeze ci-image build --python 3.9 2. Enter breeze environment by selecting the appropriate airflow version and choosing ``providers-and-tests`` option for ``--mount-sources`` flag. @@ -1241,7 +1241,7 @@ Herr id how to reproduce it. .. code-block:: bash - breeze ci-image build --python 3.8 + breeze ci-image build --python 3.9 2. 
Build providers from latest sources: diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md index e0e014cfcb13d..1f56def27646c 100644 --- a/dev/README_RELEASE_AIRFLOW.md +++ b/dev/README_RELEASE_AIRFLOW.md @@ -682,7 +682,7 @@ Optionally it can be followed with constraints ```shell script pip install apache-airflow==rc \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-/constraints-3.8.txt"` + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-/constraints-3.9.txt"` ``` Note that the constraints contain python version that you are installing it with. @@ -694,7 +694,7 @@ There is also an easy way of installation with Breeze if you have the latest sou Running the following command will use tmux inside breeze, create `admin` user and run Webserver & Scheduler: ```shell script -breeze start-airflow --use-airflow-version 2.7.0rc1 --python 3.8 --backend postgres +breeze start-airflow --use-airflow-version 2.7.0rc1 --python 3.9 --backend postgres ``` You can also choose different executors and extras to install when you are installing airflow this way. For @@ -702,7 +702,7 @@ example in order to run Airflow with CeleryExecutor and install celery, google a Airflow 2.7.0, you need to have celery provider installed to run Airflow with CeleryExecutor) you can run: ```shell script -breeze start-airflow --use-airflow-version 2.7.0rc1 --python 3.8 --backend postgres \ +breeze start-airflow --use-airflow-version 2.7.0rc1 --python 3.9 --backend postgres \ --executor CeleryExecutor --airflow-extras "celery,google,amazon" ``` @@ -838,7 +838,7 @@ the older branches, you should set the "skip" field to true. ## Verify production images ```shell script -for PYTHON in 3.8 3.9 3.10 3.11 3.12 +for PYTHON in 3.9 3.10 3.11 3.12 do docker pull apache/airflow:${VERSION}-python${PYTHON} breeze prod-image verify --image-name apache/airflow:${VERSION}-python${PYTHON} diff --git a/dev/README_RELEASE_PROVIDER_PACKAGES.md b/dev/README_RELEASE_PROVIDER_PACKAGES.md index 67d1bfa30fc07..dbb785200f74b 100644 --- a/dev/README_RELEASE_PROVIDER_PACKAGES.md +++ b/dev/README_RELEASE_PROVIDER_PACKAGES.md @@ -1031,7 +1031,7 @@ pip install apache-airflow-providers-==rc ### Installing with Breeze ```shell -breeze start-airflow --use-airflow-version 2.2.4 --python 3.8 --backend postgres \ +breeze start-airflow --use-airflow-version 2.2.4 --python 3.9 --backend postgres \ --load-example-dags --load-default-connections ``` diff --git a/dev/breeze/README.md b/dev/breeze/README.md index 15c0b66f57f46..d28cfd5353a2c 100644 --- a/dev/breeze/README.md +++ b/dev/breeze/README.md @@ -66,6 +66,6 @@ PLEASE DO NOT MODIFY THE HASH BELOW! IT IS AUTOMATICALLY UPDATED BY PRE-COMMIT. --------------------------------------------------------------------------------------------------------- -Package config hash: 8e382ff46231b261a569886a45480104eb5436434d2845c3eb011ee9dd4da3c2fa33f561eaa36f2245a29c8719ae2e86d7ffec39463c46e0b3b4bde56a27abe6 +Package config hash: 2ae1201c56227b6fcb599f020360a906100a80b32ed3a0d4927c8721e738afee3867f9ed567fd75ec9f368933c3a94c1336f8ab068f7892ed1ebe6244ccf20fe --------------------------------------------------------------------------------------------------------- diff --git a/dev/breeze/doc/01_installation.rst b/dev/breeze/doc/01_installation.rst index 7107be900d97b..1c7ad0ee62838 100644 --- a/dev/breeze/doc/01_installation.rst +++ b/dev/breeze/doc/01_installation.rst @@ -313,7 +313,7 @@ that Breeze works on .. 
warning:: Upgrading from earlier Python version - If you used Breeze with Python 3.7 and when running it, it will complain that it needs Python 3.8. In this + If you used Breeze with Python 3.8 and when running it, it will complain that it needs Python 3.9. In this case you should force-reinstall Breeze with ``pipx``: .. code-block:: bash @@ -342,14 +342,14 @@ that Breeze works on .. code-block:: bash - pipx reinstall --python /Users/airflow/.pyenv/versions/3.8.16/bin/python apache-airflow-breeze + pipx reinstall --python /Users/airflow/.pyenv/versions/3.9.16/bin/python apache-airflow-breeze Or you can uninstall breeze and install it with a specific python version: .. code-block:: bash pipx uninstall apache-airflow-breeze - pipx install -e ./dev/breeze --python /Users/airflow/.pyenv/versions/3.8.16/bin/python + pipx install -e ./dev/breeze --python /Users/airflow/.pyenv/versions/3.9.16/bin/python Running Breeze for the first time diff --git a/dev/breeze/doc/03_developer_tasks.rst b/dev/breeze/doc/03_developer_tasks.rst index 76f43606837e8..87bb2713b93fa 100644 --- a/dev/breeze/doc/03_developer_tasks.rst +++ b/dev/breeze/doc/03_developer_tasks.rst @@ -34,12 +34,12 @@ You can use additional ``breeze`` flags to choose your environment. You can spec version to use, and backend (the meta-data database). Thanks to that, with Breeze, you can recreate the same environments as we have in matrix builds in the CI. See next chapter for backend selection. -For example, you can choose to run Python 3.8 tests with MySQL as backend and with mysql version 8 +For example, you can choose to run Python 3.9 tests with MySQL as backend and with mysql version 8 as follows: .. code-block:: bash - breeze --python 3.8 --backend mysql --mysql-version 8.0 + breeze --python 3.9 --backend mysql --mysql-version 8.0 .. note:: Note for Windows WSL2 users @@ -55,7 +55,7 @@ Try adding ``--builder=default`` to your command. For example: .. code-block:: bash - breeze --builder=default --python 3.8 --backend mysql --mysql-version 8.0 + breeze --builder=default --python 3.9 --backend mysql --mysql-version 8.0 The choices you make are persisted in the ``./.build/`` cache directory so that next time when you use the ``breeze`` script, it could use the values that were used previously. This way you do not have to specify @@ -331,7 +331,7 @@ When you are starting airflow from local sources, www asset compilation is autom .. code-block:: bash - breeze --python 3.8 --backend mysql start-airflow + breeze --python 3.9 --backend mysql start-airflow You can also use it to start different executor. @@ -344,7 +344,7 @@ You can also use it to start any released version of Airflow from ``PyPI`` with .. code-block:: bash - breeze start-airflow --python 3.8 --backend mysql --use-airflow-version 2.7.0 + breeze start-airflow --python 3.9 --backend mysql --use-airflow-version 2.7.0 When you are installing version from PyPI, it's also possible to specify extras that should be used when installing Airflow - you can provide several extras separated by coma - for example to install diff --git a/dev/breeze/doc/06_managing_docker_images.rst b/dev/breeze/doc/06_managing_docker_images.rst index 294f1540f3667..bb4c4f9e06f62 100644 --- a/dev/breeze/doc/06_managing_docker_images.rst +++ b/dev/breeze/doc/06_managing_docker_images.rst @@ -140,10 +140,10 @@ suffix and they need to also be paired with corresponding runtime dependency add .. 
code-block:: bash - breeze prod-image build --python 3.8 --additional-dev-deps "libasound2-dev" \ + breeze prod-image build --python 3.9 --additional-dev-deps "libasound2-dev" \ --additional-runtime-apt-deps "libasound2" -Same as above but uses python 3.8. +Same as above but uses python 3.9. Building PROD image ................... diff --git a/dev/breeze/doc/10_advanced_breeze_topics.rst b/dev/breeze/doc/10_advanced_breeze_topics.rst index ac5421f85aa9a..a4f9384863009 100644 --- a/dev/breeze/doc/10_advanced_breeze_topics.rst +++ b/dev/breeze/doc/10_advanced_breeze_topics.rst @@ -33,8 +33,8 @@ For testing, you can create your own virtual environment, or use the one that `` already installed breeze following the recommended ``pipx install -e ./dev/breeze`` command. For local virtualenv, you can use ``pyenv`` or any other virtualenv wrapper. For example with ``pyenv``, -you can use ``pyenv virtualenv 3.8.6 airflow-breeze`` to create virtualenv called ``airflow-breeze`` -with Python 3.8.6. Then you can use ``pyenv activate airflow-breeze`` to activate it and install breeze +you can use ``pyenv virtualenv 3.9.6 airflow-breeze`` to create virtualenv called ``airflow-breeze`` +with Python 3.9.6. Then you can use ``pyenv activate airflow-breeze`` to activate it and install breeze in editable mode with ``pip install -e ./dev/breeze``. For ``pipx`` virtualenv, you can use the virtualenv that ``pipx`` created for you. You can find the name @@ -56,7 +56,7 @@ make sure to follow these steps: this will bypass the check we run in Breeze to see if there are new requirements to install for it See example configuration for PyCharm which has run/debug configuration for -``breeze sbom generate-providers-requirements --provider-id sqlite --python 3.8`` +``breeze sbom generate-providers-requirements --provider-id sqlite --python 3.9`` .. raw:: html diff --git a/dev/breeze/doc/adr/0002-implement-standalone-python-command.md b/dev/breeze/doc/adr/0002-implement-standalone-python-command.md index 37eebcf3e15d1..ddd005fd92dde 100644 --- a/dev/breeze/doc/adr/0002-implement-standalone-python-command.md +++ b/dev/breeze/doc/adr/0002-implement-standalone-python-command.md @@ -138,7 +138,7 @@ There are a few properties of Breeze/CI scripts that should be maintained though run a command and get everything done with the least number of prerequisites * The prerequisites for Breeze and CI are: - * Python 3.8+ (Python 3.8 end of life is October 2024) + * Python 3.9+ (Python 3.9 end of life is October 2025) * Docker (23.0+) * Docker Compose (2.16.0+) * No other tools and CLI commands should be needed diff --git a/dev/breeze/doc/ci/02_images.md b/dev/breeze/doc/ci/02_images.md index 6dfa8f350f828..8c699e43b5dbb 100644 --- a/dev/breeze/doc/ci/02_images.md +++ b/dev/breeze/doc/ci/02_images.md @@ -129,17 +129,17 @@ The images are built with default extras - different extras for CI and production image and you can change the extras via the `--airflow-extras` parameters and add new ones with `--additional-airflow-extras`. 
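Before the build examples below, a hedged way to double-check which extras actually landed in an image you built (the image tag is an assumption matching the defaults used in this document):

``` bash
# List airflow and provider packages baked into the image;
# --entrypoint bypasses the image's own entrypoint script
docker run --rm --entrypoint pip \
  ghcr.io/apache/airflow/main/prod/python3.9:latest freeze | grep apache-airflow
```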
-For example if you want to build Python 3.8 version of production image +For example if you want to build Python 3.9 version of production image with "all" extras installed you should run this command: ``` bash -breeze prod-image build --python 3.8 --airflow-extras "all" +breeze prod-image build --python 3.9 --airflow-extras "all" ``` If you just want to add new extras you can add them like that: ``` bash -breeze prod-image build --python 3.8 --additional-airflow-extras "all" +breeze prod-image build --python 3.9 --additional-airflow-extras "all" ``` The command that builds the CI image is optimized to minimize the time @@ -160,7 +160,7 @@ You can also build production images from PIP packages via providing `--install-airflow-version` parameter to Breeze: ``` bash -breeze prod-image build --python 3.8 --additional-airflow-extras=trino --install-airflow-version=2.0.0 +breeze prod-image build --python 3.9 --additional-airflow-extras=trino --install-airflow-version=2.0.0 ``` This will build the image using command similar to: @@ -168,7 +168,7 @@ This will build the image using command similar to: ``` bash pip install \ apache-airflow[async,amazon,celery,cncf.kubernetes,docker,elasticsearch,ftp,grpc,hashicorp,http,ldap,google,microsoft.azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv]==2.0.0 \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.0.0/constraints-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.0.0/constraints-3.9.txt" ``` > [!NOTE] @@ -199,7 +199,7 @@ HEAD of development for constraints): ``` bash pip install "https://github.com/apache/airflow/archive/.tar.gz#egg=apache-airflow" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.8.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.9.txt" ``` You can also skip installing airflow and install it from locally @@ -207,7 +207,7 @@ provided files by using `--install-packages-from-context` parameter to Breeze: ``` bash -breeze prod-image build --python 3.8 --additional-airflow-extras=trino --install-packages-from-context +breeze prod-image build --python 3.9 --additional-airflow-extras=trino --install-packages-from-context ``` In this case you airflow and all packages (.whl files) should be placed @@ -241,20 +241,20 @@ flags: `registry` (default), `local`, or `disabled` flags when you run Breeze commands. For example: ``` bash -breeze ci-image build --python 3.8 --docker-cache local +breeze ci-image build --python 3.9 --docker-cache local ``` Will build the CI image using local build cache (note that it will take quite a long time the first time you run it). ``` bash -breeze prod-image build --python 3.8 --docker-cache registry +breeze prod-image build --python 3.9 --docker-cache registry ``` Will build the production image with cache used from registry. ``` bash -breeze prod-image build --python 3.8 --docker-cache disabled +breeze prod-image build --python 3.9 --docker-cache disabled ``` Will build the production image from the scratch. @@ -336,12 +336,12 @@ faster. It is enough to pass `--image-tag` and the registry and Breeze will download and execute commands using the same image that was used during the CI tests. 
-For example this command will run the same Python 3.8 image as was used +For example this command will run the same Python 3.9 image as was used in build identified with 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e commit SHA with enabled rabbitmq integration. ``` bash -breeze --image-tag 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e --python 3.8 --integration rabbitmq +breeze --image-tag 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e --python 3.9 --integration rabbitmq ``` You can see more details and examples in[Breeze](../README.rst) @@ -361,7 +361,7 @@ you can build the image in the Here just a few examples are presented which should give you general understanding of what you can customize. -This builds the production image in version 3.8 with additional airflow +This builds the production image in version 3.9 with additional airflow extras from 2.0.0 PyPI package and additional apt dev and runtime dependencies. @@ -373,7 +373,7 @@ plugin installed. ``` bash DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" \ --build-arg ADDITIONAL_PYTHON_DEPS="pandas" \ --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++" \ @@ -384,7 +384,7 @@ the same image can be built using `breeze` (it supports auto-completion of the options): ``` bash -breeze ci-image build --python 3.8 --additional-airflow-extras=jdbc --additional-python-deps="pandas" \ +breeze ci-image build --python 3.9 --additional-airflow-extras=jdbc --additional-python-deps="pandas" \ --additional-dev-apt-deps="gcc g++" ``` @@ -398,7 +398,7 @@ comment](https://github.com/apache/airflow/issues/8605#issuecomment-690065621): ``` bash DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="slack" \ --build-arg ADDITIONAL_PYTHON_DEPS="apache-airflow-providers-odbc \ @@ -423,8 +423,8 @@ can be used for CI images: | Build argument | Default value | Description | |-----------------------------------|-------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `PYTHON_BASE_IMAGE` | `python:3.8-slim-bookworm` | Base Python image | -| `PYTHON_MAJOR_MINOR_VERSION` | `3.8` | major/minor version of Python (should match base image) | +| `PYTHON_BASE_IMAGE` | `python:3.9-slim-bookworm` | Base Python image | +| `PYTHON_MAJOR_MINOR_VERSION` | `3.9` | major/minor version of Python (should match base image) | | `DEPENDENCIES_EPOCH_NUMBER` | `2` | increasing this number will reinstall all apt dependencies | | `ADDITIONAL_PIP_INSTALL_FLAGS` | | additional `pip` flags passed to the installation commands (except when reinstalling `pip` itself) | | `PIP_NO_CACHE_DIR` | `true` | if true, then no pip cache will be stored | @@ -455,59 +455,59 @@ can be used for CI images: Here are some examples of how CI images can built manually. CI is always built from local sources. -This builds the CI image in version 3.8 with default extras ("all"). +This builds the CI image in version 3.9 with default extras ("all"). ``` bash DOCKER_BUILDKIT=1 docker build . 
-f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" --tag my-image:0.0.1 + --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" --tag my-image:0.0.1 ``` -This builds the CI image in version 3.8 with "gcp" extra only. +This builds the CI image in version 3.9 with "gcp" extra only. ``` bash DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ --build-arg AIRFLOW_EXTRAS=gcp --tag my-image:0.0.1 ``` -This builds the CI image in version 3.8 with "apache-beam" extra added. +This builds the CI image in version 3.9 with "apache-beam" extra added. ``` bash DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="apache-beam" --tag my-image:0.0.1 ``` -This builds the CI image in version 3.8 with "mssql" additional package +This builds the CI image in version 3.9 with "mssql" additional package added. ``` bash DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ --build-arg ADDITIONAL_PYTHON_DEPS="mssql" --tag my-image:0.0.1 ``` -This builds the CI image in version 3.8 with "gcc" and "g++" additional +This builds the CI image in version 3.9 with "gcc" and "g++" additional apt dev dependencies added. ``` DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull - --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++" --tag my-image:0.0.1 ``` -This builds the CI image in version 3.8 with "jdbc" extra and +This builds the CI image in version 3.9 with "jdbc" extra and "default-jre-headless" additional apt runtime dependencies added. ``` DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ --build-arg AIRFLOW_EXTRAS=jdbc \ --tag my-image:0.0.1 ``` @@ -573,8 +573,7 @@ percent-encoded when you access them via UI (/ = %2F) | PROD image | airflow/\/prod/python\:\ | faster to build or pull. Production image optimized for size. | - \ might be either "main" or "v2-\*-test" -- \ - Python version (Major + Minor).Should be one of \["3.8", - "3.9", "3.10", "3.11", "3.12" \]. +- \ - Python version (Major + Minor).Should be one of \["3.9", "3.10", "3.11", "3.12" \]. - \ - full-length SHA of commit either from the tip of the branch (for pushes/schedule) or commit from the tip of the branch used for the PR. diff --git a/dev/breeze/doc/ci/04_selective_checks.md b/dev/breeze/doc/ci/04_selective_checks.md index 23131ec893948..e5894b0296875 100644 --- a/dev/breeze/doc/ci/04_selective_checks.md +++ b/dev/breeze/doc/ci/04_selective_checks.md @@ -169,8 +169,8 @@ Github Actions to pass the list of parameters to a command to execute | Output | Meaning of the output | Example value | List as string | |----------------------------------------|------------------------------------------------------------------------------------------------------|-------------------------------------------|----------------| | affected-providers-list-as-string | List of providers affected when they are selectively affected. 
| airbyte http | * | -| all-python-versions | List of all python versions there are available in the form of JSON array | ['3.8', '3.9', '3.10'] | | -| all-python-versions-list-as-string | List of all python versions there are available in the form of space separated string | 3.8 3.9 3.10 | * | +| all-python-versions | List of all python versions there are available in the form of JSON array | ['3.9', '3.10'] | | +| all-python-versions-list-as-string | List of all python versions there are available in the form of space separated string | 3.9 3.10 | * | | all-versions | If set to true, then all python, k8s, DB versions are used for tests. | false | | | basic-checks-only | Whether to run all static checks ("false") or only basic set of static checks ("true") | false | | | build_system_changed_in_pyproject_toml | When builds system dependencies changed in pyproject.toml changed in the PR. | false | | @@ -184,7 +184,7 @@ Github Actions to pass the list of parameters to a command to execute | default-kubernetes-version | Which Kubernetes version to use as default | v1.25.2 | | | default-mysql-version | Which MySQL version to use as default | 5.7 | | | default-postgres-version | Which Postgres version to use as default | 10 | | -| default-python-version | Which Python version to use as default | 3.8 | | +| default-python-version | Which Python version to use as default | 3.9 | | | docker-cache | Which cache should be used for images ("registry", "local" , "disabled") | registry | | | docs-build | Whether to build documentation ("true"/"false") | true | | | docs-list-as-string | What filter to apply to docs building - based on which documentation packages should be built | apache-airflow helm-chart google | | @@ -200,7 +200,7 @@ Github Actions to pass the list of parameters to a command to execute | is-self-hosted-runner | Whether the runner is self-hosted | false | | | is-vm-runner | Whether the runner uses VM to run | true | | | kind-version | Which Kind version to use for tests | v0.16.0 | | -| kubernetes-combos-list-as-string | All combinations of Python version and Kubernetes version to use for tests as space-separated string | 3.8-v1.25.2 3.9-v1.26.4 | * | +| kubernetes-combos-list-as-string | All combinations of Python version and Kubernetes version to use for tests as space-separated string | 3.9-v1.25.2 3.9-v1.26.4 | * | | kubernetes-versions | All Kubernetes versions to use for tests as JSON array | ['v1.25.2'] | | | kubernetes-versions-list-as-string | All Kubernetes versions to use for tests as space-separated string | v1.25.2 | * | | mypy-folders | List of folders to be considered for mypy | [] | | @@ -219,8 +219,8 @@ Github Actions to pass the list of parameters to a command to execute | prod-image-build | Whether PROD image build is needed | true | | | providers-compatibility-checks | List of dicts: (python_version, airflow_version, removed_providers) for compatibility checks | [] | | | pyproject-toml-changed | When pyproject.toml changed in the PR. 
| false | |
-| python-versions | List of python versions to use for that build | ['3.8'] | * |
-| python-versions-list-as-string | Which versions of MySQL to use for tests as space-separated string | 3.8 | * |
+| python-versions | List of python versions to use for that build | ['3.9'] | * |
+| python-versions-list-as-string | Which versions of MySQL to use for tests as space-separated string | 3.9 | * |
| run-amazon-tests | Whether Amazon tests should be run ("true"/"false") | true | |
| run-kubernetes-tests | Whether Kubernetes tests should be run ("true"/"false") | true | |
| run-tests | Whether unit tests should be run ("true"/"false") | true | |
diff --git a/dev/breeze/doc/ci/08_running_ci_locally.md b/dev/breeze/doc/ci/08_running_ci_locally.md
index 6e1cbb0917536..cc9c89954df3e 100644
--- a/dev/breeze/doc/ci/08_running_ci_locally.md
+++ b/dev/breeze/doc/ci/08_running_ci_locally.md
@@ -72,19 +72,19 @@ For example knowing that the CI job was for commit
`cd27124534b46c9688a1d89e75fcd137ab5137e3`:

``` bash
-docker pull ghcr.io/apache/airflow/main/ci/python3.8:cd27124534b46c9688a1d89e75fcd137ab5137e3
+docker pull ghcr.io/apache/airflow/main/ci/python3.9:cd27124534b46c9688a1d89e75fcd137ab5137e3

-docker run -it ghcr.io/apache/airflow/main/ci/python3.8:cd27124534b46c9688a1d89e75fcd137ab5137e3
+docker run -it ghcr.io/apache/airflow/main/ci/python3.9:cd27124534b46c9688a1d89e75fcd137ab5137e3
```

But you usually need to pass more variables and complex setup if you
want to connect to a database or enable some integrations. Therefore it
is easiest to use [Breeze](../README.rst) for that. For
-example if you need to reproduce a MySQL environment in python 3.8
+example if you need to reproduce a MySQL environment in python 3.9
environment you can run:

``` bash
-breeze --image-tag cd27124534b46c9688a1d89e75fcd137ab5137e3 --python 3.8 --backend mysql
+breeze --image-tag cd27124534b46c9688a1d89e75fcd137ab5137e3 --python 3.9 --backend mysql
```

You will be dropped into a shell with the exact version that was used
diff --git a/dev/breeze/doc/images/output-commands.svg b/dev/breeze/doc/images/output-commands.svg
index 78c753526e449..f80a72a9fbc54 100644
--- a/dev/breeze/doc/images/output-commands.svg
+++ b/dev/breeze/doc/images/output-commands.svg
[SVG screenshot of the `breeze` help screen; only textual change: the `--python` choices "(>3.8< | 3.9 | 3.10 | 3.11 | 3.12) [default: 3.8]" become "(>3.9< | 3.10 | 3.11 | 3.12) [default: 3.9]"]
diff --git a/dev/breeze/doc/images/output_ci-image_build.svg b/dev/breeze/doc/images/output_ci-image_build.svg
index 131b618e403ce..6dd856c3dc8db 100644
--- a/dev/breeze/doc/images/output_ci-image_build.svg
+++ b/dev/breeze/doc/images/output_ci-image_build.svg
[SVG screenshot of the `breeze ci-image build` help screen; textual changes: `--python` choices/default move from 3.8 to 3.9, and the `--python-versions` default changes from "3.8 3.9 3.10 3.11 3.12" to "3.9 3.10 3.11 3.12"]
diff --git a/dev/breeze/doc/images/output_ci-image_build.txt b/dev/breeze/doc/images/output_ci-image_build.txt
index b56ee7c68abb5..905847bedc783 100644
--- a/dev/breeze/doc/images/output_ci-image_build.txt
+++ b/dev/breeze/doc/images/output_ci-image_build.txt
@@ -1 +1 @@
-d64cda52af48f5c2dc704fac8a738d49
+686950c27e41fa50cf22abb8c25e496a
diff --git a/dev/breeze/doc/images/output_ci-image_pull.svg b/dev/breeze/doc/images/output_ci-image_pull.svg
index b0ebd7a1ea5f1..a784b4c9a2389 100644
--- a/dev/breeze/doc/images/output_ci-image_pull.svg
+++ b/dev/breeze/doc/images/output_ci-image_pull.svg
[SVG screenshot of the `breeze ci-image pull` help screen, re-rendered wholesale; the flags shown (`--image-tag`, `--python`, `--verify`, `--wait-for-image`, `--tag-as-latest`, the parallel running, Github authentication and common options) are unchanged apart from the `--python` choices/default moving from 3.8 to 3.9 and the `--python-versions` default dropping 3.8]
diff --git a/dev/breeze/doc/images/output_ci-image_pull.txt b/dev/breeze/doc/images/output_ci-image_pull.txt
index c916be1bf34eb..aac2c46f1ad44 100644
--- a/dev/breeze/doc/images/output_ci-image_pull.txt
+++ b/dev/breeze/doc/images/output_ci-image_pull.txt
@@ -1 +1 @@
-3c950cd0e358661163e52c9f3726aee8
+eadf5c7b6661cf557c2ab5b38e462481
diff --git a/dev/breeze/doc/images/output_ci-image_verify.svg b/dev/breeze/doc/images/output_ci-image_verify.svg
index 61a73a5df55a4..23acf81511254 100644
--- a/dev/breeze/doc/images/output_ci-image_verify.svg
+++ b/dev/breeze/doc/images/output_ci-image_verify.svg
[SVG screenshot of the `breeze ci-image verify` help screen; textual changes: `--python` choices/default move from 3.8 to 3.9 and the `--python-versions` default drops 3.8; the remaining flags (`--image-name`, `--image-tag`, `--pull`, parallel options) are unchanged]
diff --git a/dev/breeze/doc/images/output_ci-image_verify.txt b/dev/breeze/doc/images/output_ci-image_verify.txt
index bad4f7d8a8bae..3d2f72ffa6857 100644
--- a/dev/breeze/doc/images/output_ci-image_verify.txt
+++ b/dev/breeze/doc/images/output_ci-image_verify.txt
@@ -1 +1 @@
-ec059842b7c4c41db33a6362a361b0ef
+324f4d4949eef209e991d890ddf2ff4f
diff --git a/dev/breeze/doc/images/output_k8s_build-k8s-image.svg b/dev/breeze/doc/images/output_k8s_build-k8s-image.svg
index 8708f57a34e41..eff84069b0f97 100644
--- a/dev/breeze/doc/images/output_k8s_build-k8s-image.svg
+++ b/dev/breeze/doc/images/output_k8s_build-k8s-image.svg
[SVG screenshot of the `breeze k8s build-k8s-image` help screen; textual changes: `--python` choices/default move from 3.8 to 3.9 and the `--python-versions` default drops 3.8; the remaining flags (`--image-tag`, `--rebuild-base-image`, `--copy-local-sources/--no-copy-local-sources`) are unchanged]
diff --git a/dev/breeze/doc/images/output_k8s_build-k8s-image.txt b/dev/breeze/doc/images/output_k8s_build-k8s-image.txt
index 0afa131e5c16b..dd6b1b79cdf6c 100644
--- a/dev/breeze/doc/images/output_k8s_build-k8s-image.txt
+++ b/dev/breeze/doc/images/output_k8s_build-k8s-image.txt
@@ -1 +1 @@
-f9669ae229dfd2954ae7bf6f66bb92bf
+7e4553f3179fe40e5c05bfc556cb2355
diff --git a/dev/breeze/doc/images/output_k8s_configure-cluster.svg b/dev/breeze/doc/images/output_k8s_configure-cluster.svg
index 2ecb3db7ecb11..b46a5fd1175a5 100644
--- a/dev/breeze/doc/images/output_k8s_configure-cluster.svg
+++ b/dev/breeze/doc/images/output_k8s_configure-cluster.svg
[SVG screenshot of the `breeze k8s configure-cluster` help screen, re-rendered wholesale; the removed rendering shows `--python` choices "(>3.8< | 3.9 | 3.10 | 3.11 | 3.12) [default: 3.8]", `--kubernetes-version` choices "(>v1.28.13< | v1.29.8 | v1.30.4 | v1.31.0) [default: v1.28.13]", and a `--python-versions` default of "3.8 3.9 3.10 3.11 3.12"; the replacement rendering is truncated at the end of this patch excerpt]
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12) +[default: 3.9]                                               +--kubernetes-versionKubernetes version used to create the KinD cluster of. +(>v1.28.13< | v1.29.8 | v1.30.4 | v1.31.0)             +[default: v1.28.13]                                    +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Parallel options ───────────────────────────────────────────────────────────────────────────────────────────────────╮ +--run-in-parallelRun the operation in parallel on all or selected subset of parameters. +--parallelismMaximum number of processes to use while running the operation in parallel for cluster  +operations.                                                                             +(INTEGER RANGE)                                                                         +[default: 2; 1<=x<=4]                                                                   +--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) +[default: 3.9 3.10 3.11 3.12]                                                  +--kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT) +[default: v1.28.13 v1.29.8 v1.30.4 v1.31.0]                    +--skip-cleanupSkip cleanup of temporary files created during parallel run. +--debug-resourcesWhether to show resource information while running in parallel. +--include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. 
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_k8s_configure-cluster.txt b/dev/breeze/doc/images/output_k8s_configure-cluster.txt index ac398e9fbce58..7ed7f0e99146a 100644 --- a/dev/breeze/doc/images/output_k8s_configure-cluster.txt +++ b/dev/breeze/doc/images/output_k8s_configure-cluster.txt @@ -1 +1 @@ -c175cc4375923707f1aa91074df42c37 +6e0e5ed30fcde18abe30f7dab1c4f0b6 diff --git a/dev/breeze/doc/images/output_k8s_create-cluster.svg b/dev/breeze/doc/images/output_k8s_create-cluster.svg index 6d3f18d341a20..04176b416c166 100644 --- a/dev/breeze/doc/images/output_k8s_create-cluster.svg +++ b/dev/breeze/doc/images/output_k8s_create-cluster.svg @@ -160,8 +160,8 @@ ╭─ K8S cluster creation flags ─────────────────────────────────────────────────────────────────────────────────────────╮ --python-pPython major/minor version used in Airflow image for images. -(>3.8< | 3.9 | 3.10 | 3.11 | 3.12)                           -[default: 3.8]                                               +(>3.9< | 3.10 | 3.11 | 3.12)                                 +[default: 3.9]                                               --kubernetes-versionKubernetes version used to create the KinD cluster of. (>v1.28.13< | v1.29.8 | v1.30.4 | v1.31.0)             [default: v1.28.13]                                    @@ -174,7 +174,7 @@ (INTEGER RANGE)                                                                         [default: 2; 1<=x<=4]                                                                   --python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) -[default: 3.8 3.9 3.10 3.11 3.12]                                              +[default: 3.9 3.10 3.11 3.12]                                                  --kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT) [default: v1.28.13 v1.29.8 v1.30.4 v1.31.0]                    --skip-cleanupSkip cleanup of temporary files created during parallel run. diff --git a/dev/breeze/doc/images/output_k8s_create-cluster.txt b/dev/breeze/doc/images/output_k8s_create-cluster.txt index 29557703705c2..e9e52f4a391e9 100644 --- a/dev/breeze/doc/images/output_k8s_create-cluster.txt +++ b/dev/breeze/doc/images/output_k8s_create-cluster.txt @@ -1 +1 @@ -98956f766fe2b98109626b909082e4f3 +254a3805eeb108125d078abecf9226fb diff --git a/dev/breeze/doc/images/output_k8s_delete-cluster.svg b/dev/breeze/doc/images/output_k8s_delete-cluster.svg index 83efc5b324a8e..ebb3878309bb5 100644 --- a/dev/breeze/doc/images/output_k8s_delete-cluster.svg +++ b/dev/breeze/doc/images/output_k8s_delete-cluster.svg @@ -1,4 +1,4 @@ - + Delete the current KinD Cluster (optionally all clusters). ╭─ K8S cluster delete flags ───────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images. -(>3.8< | 3.9 | 3.10 | 3.11 | 3.12)                           -[default: 3.8]                                               ---kubernetes-versionKubernetes version used to create the KinD cluster of. 
-(>v1.28.13< | v1.29.8 | v1.30.4 | v1.31.0)             -[default: v1.28.13]                                    ---allApply it to all created clusters -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12) +[default: 3.9]                                               +--kubernetes-versionKubernetes version used to create the KinD cluster of. +(>v1.28.13< | v1.29.8 | v1.30.4 | v1.31.0)             +[default: v1.28.13]                                    +--allApply it to all created clusters +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_k8s_delete-cluster.txt b/dev/breeze/doc/images/output_k8s_delete-cluster.txt index 9057ac44c262f..3c0998b1f3cde 100644 --- a/dev/breeze/doc/images/output_k8s_delete-cluster.txt +++ b/dev/breeze/doc/images/output_k8s_delete-cluster.txt @@ -1 +1 @@ -ab4aacbb44ee638a91f4ff225cb1dbfa +11cd0c89e39c35786fb33d00fbb927fc diff --git a/dev/breeze/doc/images/output_k8s_deploy-airflow.svg b/dev/breeze/doc/images/output_k8s_deploy-airflow.svg index 034b87a27dacf..1ad641587467d 100644 --- a/dev/breeze/doc/images/output_k8s_deploy-airflow.svg +++ b/dev/breeze/doc/images/output_k8s_deploy-airflow.svg @@ -181,8 +181,8 @@ ╭─ Airflow deploy flags ───────────────────────────────────────────────────────────────────────────────────────────────╮ --python-pPython major/minor version used in Airflow image for images. -(>3.8< | 3.9 | 3.10 | 3.11 | 3.12)                           -[default: 3.8]                                               +(>3.9< | 3.10 | 3.11 | 3.12)                                 +[default: 3.9]                                               --kubernetes-versionKubernetes version used to create the KinD cluster of. 
(>v1.28.13< | v1.29.8 | v1.30.4 | v1.31.0)             [default: v1.28.13]                                    @@ -202,7 +202,7 @@ (INTEGER RANGE)                                                                         [default: 2; 1<=x<=4]                                                                   --python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) -[default: 3.8 3.9 3.10 3.11 3.12]                                              +[default: 3.9 3.10 3.11 3.12]                                                  --kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT) [default: v1.28.13 v1.29.8 v1.30.4 v1.31.0]                    --skip-cleanupSkip cleanup of temporary files created during parallel run. diff --git a/dev/breeze/doc/images/output_k8s_deploy-airflow.txt b/dev/breeze/doc/images/output_k8s_deploy-airflow.txt index 8029f4f3e1ac6..207f911525643 100644 --- a/dev/breeze/doc/images/output_k8s_deploy-airflow.txt +++ b/dev/breeze/doc/images/output_k8s_deploy-airflow.txt @@ -1 +1 @@ -640037e62b7ed467a7a8134046444da2 +027d1bd159cdc426dd4a64051bf2dd8d diff --git a/dev/breeze/doc/images/output_k8s_k9s.svg b/dev/breeze/doc/images/output_k8s_k9s.svg index ed98b978fdcbd..55a6d16fbb127 100644 --- a/dev/breeze/doc/images/output_k8s_k9s.svg +++ b/dev/breeze/doc/images/output_k8s_k9s.svg @@ -1,4 +1,4 @@ - + ╭─ K8S k9s flags ──────────────────────────────────────────────────────────────────────────────────────────────────────╮ --use-dockerUse Docker to start k8s executor (otherwise k9s from PATH is used and only run with docker if not found on PATH).                                                                     ---python-pPython major/minor version used in Airflow image for images. -(>3.8< | 3.9 | 3.10 | 3.11 | 3.12)                           -[default: 3.8]                                               ---kubernetes-versionKubernetes version used to create the KinD cluster of. -(>v1.28.13< | v1.29.8 | v1.30.4 | v1.31.0)             -[default: v1.28.13]                                    -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +--python-pPython major/minor version used in Airflow image for images.(>3.9< | 3.10 | 3.11 | 3.12) +[default: 3.9]                                               +--kubernetes-versionKubernetes version used to create the KinD cluster of. +(>v1.28.13< | v1.29.8 | v1.30.4 | v1.31.0)             +[default: v1.28.13]                                    +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. 
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_k8s_k9s.txt b/dev/breeze/doc/images/output_k8s_k9s.txt index a6d65c87c5166..41cb876d4b85f 100644 --- a/dev/breeze/doc/images/output_k8s_k9s.txt +++ b/dev/breeze/doc/images/output_k8s_k9s.txt @@ -1 +1 @@ -52c49a84f6fa6752bbdda1a47765d117 +4330a248804808ca67646fd429521ea4 diff --git a/dev/breeze/doc/images/output_k8s_logs.svg b/dev/breeze/doc/images/output_k8s_logs.svg index fc2daaab41a44..706b4b621c177 100644 --- a/dev/breeze/doc/images/output_k8s_logs.svg +++ b/dev/breeze/doc/images/output_k8s_logs.svg @@ -1,4 +1,4 @@ - +
+ + + Next Run + + {Boolean(dag.next_dagrun) ? ( + {dag.next_dagrun} + ) : undefined} + {Boolean(dag.timetable_summary) ? ( + + + {" "} + {" "} + {dag.timetable_summary} + + + ) : undefined} + +
+
+ + + ); +}; diff --git a/airflow/ui/src/pages/DagsList/DagsList.tsx b/airflow/ui/src/pages/DagsList/DagsList.tsx index 90981a0747643..7b87e8d253fb2 100644 --- a/airflow/ui/src/pages/DagsList/DagsList.tsx +++ b/airflow/ui/src/pages/DagsList/DagsList.tsx @@ -21,21 +21,24 @@ import { Heading, HStack, Select, - Spinner, + Skeleton, VStack, } from "@chakra-ui/react"; import type { ColumnDef } from "@tanstack/react-table"; -import { type ChangeEventHandler, useCallback } from "react"; +import { type ChangeEventHandler, useCallback, useState } from "react"; import { useSearchParams } from "react-router-dom"; import { useDagServiceGetDags } from "openapi/queries"; import type { DAGResponse } from "openapi/requests/types.gen"; import { DataTable } from "src/components/DataTable"; +import { ToggleTableDisplay } from "src/components/DataTable/ToggleTableDisplay"; +import type { CardDef } from "src/components/DataTable/types"; import { useTableURLState } from "src/components/DataTable/useTableUrlState"; import { SearchBar } from "src/components/SearchBar"; import { TogglePause } from "src/components/TogglePause"; import { pluralize } from "src/utils/pluralize"; +import { DagCard } from "./DagCard"; import { DagsFilters } from "./DagsFilters"; const columns: Array> = [ @@ -49,6 +52,9 @@ const columns: Array> = [ ), enableSorting: false, header: "", + meta: { + skeletonWidth: 10, + }, }, { accessorKey: "dag_id", @@ -83,10 +89,18 @@ const columns: Array> = [ }, ]; +const cardDef: CardDef = { + card: ({ row }) => , + meta: { + customSkeleton: , + }, +}; + const PAUSED_PARAM = "paused"; -export const DagsList = ({ cardView = false }) => { +export const DagsList = () => { const [searchParams] = useSearchParams(); + const [display, setDisplay] = useState<"card" | "table">("card"); const showPaused = searchParams.get(PAUSED_PARAM); @@ -97,7 +111,7 @@ export const DagsList = ({ cardView = false }) => { const [sort] = sorting; const orderBy = sort ? `${sort.desc ? "-" : ""}${sort.id}` : undefined; - const { data, isLoading } = useDagServiceGetDags({ + const { data, isFetching, isLoading } = useDagServiceGetDags({ limit: pagination.pageSize, offset: pagination.pageIndex * pagination.pageSize, onlyActive: true, @@ -119,44 +133,47 @@ export const DagsList = ({ cardView = false }) => { return ( <> - {isLoading ? : undefined} - {!isLoading && Boolean(data?.dags) && ( - <> - - - - - - {pluralize("DAG", data?.total_entries)} - - {cardView ? ( - - ) : ( - false - )} - - - - - )} + + + + + + {pluralize("DAG", data?.total_entries)} + + {display === "card" ? ( + + ) : ( + false + )} + + + + ); }; diff --git a/airflow/ui/src/theme.ts b/airflow/ui/src/theme.ts index e172bf76508d1..06a3b10cc7fcf 100644 --- a/airflow/ui/src/theme.ts +++ b/airflow/ui/src/theme.ts @@ -24,39 +24,33 @@ import { createMultiStyleConfigHelpers, extendTheme } from "@chakra-ui/react"; const { defineMultiStyleConfig, definePartsStyle } = createMultiStyleConfigHelpers(tableAnatomy.keys); -const baseStyle = definePartsStyle((props) => { - const { colorMode, colorScheme } = props; - - return { - tbody: { - tr: { - "&:nth-of-type(even)": { - "th, td": { - borderBottomWidth: "0px", - }, +const baseStyle = definePartsStyle(() => ({ + tbody: { + tr: { + "&:nth-of-type(even)": { + "th, td": { + borderBottomWidth: "0px", + }, + }, + "&:nth-of-type(odd)": { + td: { + background: "subtle-bg", }, - "&:nth-of-type(odd)": { - td: { - background: - colorMode === "light" ? 
`${colorScheme}.50` : `gray.900`, - }, - "th, td": { - borderBottomWidth: "0px", - borderColor: - colorMode === "light" ? `${colorScheme}.50` : `gray.900`, - }, + "th, td": { + borderBottomWidth: "0px", + borderColor: "subtle-bg", }, }, }, - thead: { - tr: { - th: { - borderBottomWidth: 0, - }, + }, + thead: { + tr: { + th: { + borderBottomWidth: 0, }, }, - }; -}); + }, +})); export const tableTheme = defineMultiStyleConfig({ baseStyle }); @@ -72,6 +66,12 @@ const theme = extendTheme({ config: { useSystemColorMode: true, }, + semanticTokens: { + colors: { + "subtle-bg": { _dark: "gray.900", _light: "blue.50" }, + "subtle-text": { _dark: "blue.500", _light: "blue.600" }, + }, + }, styles: { global: { "*, *::before, &::after": { From 2933a653342a075046fe126277557d09a5209e81 Mon Sep 17 00:00:00 2001 From: Pierre Jeambrun Date: Tue, 8 Oct 2024 19:29:36 +0800 Subject: [PATCH 017/125] Remove more burdensome eslint rules (#42819) --- airflow/ui/rules/core.js | 7 ------- airflow/ui/src/components/DataTable/searchParams.ts | 1 - 2 files changed, 8 deletions(-) diff --git a/airflow/ui/rules/core.js b/airflow/ui/rules/core.js index 54011b1ce7da6..3c5133236e98a 100644 --- a/airflow/ui/rules/core.js +++ b/airflow/ui/rules/core.js @@ -291,13 +291,6 @@ export const coreRules = /** @type {const} @satisfies {FlatConfig.Config} */ ({ */ "max-nested-callbacks": [ERROR, { max: 3 }], - /** - * Enforce a maximum number of statements allowed in function blocks to 10. - * - * @see [max-statements](https://eslint.org/docs/latest/rules/max-statements) - */ - "max-statements": [WARN, { max: 10 }], - /** * Disallow use of `alert`, `confirm`, and `prompt`. * diff --git a/airflow/ui/src/components/DataTable/searchParams.ts b/airflow/ui/src/components/DataTable/searchParams.ts index 80bb6e967afaa..8cc57ad7b5bcf 100644 --- a/airflow/ui/src/components/DataTable/searchParams.ts +++ b/airflow/ui/src/components/DataTable/searchParams.ts @@ -24,7 +24,6 @@ export const LIMIT_PARAM = "limit"; export const OFFSET_PARAM = "offset"; export const SORT_PARAM = "sort"; -// eslint-disable-next-line max-statements export const stateToSearchParams = ( state: TableState, defaultTableState?: TableState, From 061b0a580dad1e4972a57f118015d33d2285e816 Mon Sep 17 00:00:00 2001 From: GPK Date: Tue, 8 Oct 2024 14:42:12 +0100 Subject: [PATCH 018/125] Updating prometheus statsd exporter version from v0.27.2-pr579 to v0.27.2 and pre-commit-hooks to v5.0.0 (#42809) --- .pre-commit-config.yaml | 2 +- chart/values.schema.json | 2 +- chart/values.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e33038432b541..a568c79c265aa 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -240,7 +240,7 @@ repos: alias: blacken-docs additional_dependencies: [black==23.10.0] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: check-merge-conflict name: Check that merge conflicts are not being committed diff --git a/chart/values.schema.json b/chart/values.schema.json index d8b5de41c8eb8..6cf6fea9f685a 100644 --- a/chart/values.schema.json +++ b/chart/values.schema.json @@ -841,7 +841,7 @@ "tag": { "description": "The StatsD image tag.", "type": "string", - "default": "v0.27.2-pr579" + "default": "v0.27.2" }, "pullPolicy": { "description": "The StatsD image pull policy.", diff --git a/chart/values.yaml b/chart/values.yaml index 0edb9f2bd7cd3..f18d971fe68c9 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -105,7 +105,7 @@ 
images: pullPolicy: IfNotPresent statsd: repository: quay.io/prometheus/statsd-exporter
-    tag: v0.27.2-pr579
+    tag: v0.27.2
     pullPolicy: IfNotPresent
   redis:
     repository: redis

From 5550cc68d765fc8d535417639e88d7fd04062335 Mon Sep 17 00:00:00 2001
From: Daniel Standish <15932138+dstandish@users.noreply.github.com>
Date: Tue, 8 Oct 2024 07:23:11 -0700
Subject: [PATCH 019/125] Specify asyncio_default_fixture_loop_scope in
 pytest.ini (#42827)

Previously we were getting 'PytestDeprecationWarning: The configuration
option "asyncio_default_fixture_loop_scope" is unset.'

This sets it to the new default, and makes the warning go away.
---
 pyproject.toml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/pyproject.toml b/pyproject.toml
index 63a26fd513424..bc5842fc51c68 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -558,6 +558,9 @@ python_files = [
 testpaths = [
     "tests",
 ]
+
+asyncio_default_fixture_loop_scope = "function"
+
 # Keep temporary directories (created by `tmp_path`) for the 2 most recent runs, but only for failed tests.
 tmp_path_retention_count = "2"
 tmp_path_retention_policy = "failed"

From 49b172f73f8f3dd1cab069fed3863b917b2eb1c4 Mon Sep 17 00:00:00 2001
From: Everton Seiei Arakaki
Date: Tue, 8 Oct 2024 16:32:22 +0200
Subject: [PATCH 020/125] feat: allow customizing podManagementPolicy (#42673)

* feat: allow customizing podManagementPolicy

* fix: adjust schema and indentation

* ci: adjust lint

---------

Co-authored-by: Arakaki, Everton
---
 chart/templates/workers/worker-deployment.yaml |  3 +++
 chart/values.schema.json                       | 12 ++++++++++++
 chart/values.yaml                              |  3 +++
 3 files changed, 18 insertions(+)

diff --git a/chart/templates/workers/worker-deployment.yaml b/chart/templates/workers/worker-deployment.yaml
index 23852d3427522..5499e64ebece5 100644
--- a/chart/templates/workers/worker-deployment.yaml
+++ b/chart/templates/workers/worker-deployment.yaml
@@ -74,6 +74,9 @@ spec:
       tier: airflow
       component: worker
       release: {{ .Release.Name }}
+  {{- if and $persistence .Values.workers.podManagementPolicy }}
+  podManagementPolicy: {{ .Values.workers.podManagementPolicy }}
+  {{- end }}
   {{- if and $persistence .Values.workers.updateStrategy }}
   updateStrategy: {{- toYaml .Values.workers.updateStrategy | nindent 4 }}
   {{- end }}
diff --git a/chart/values.schema.json b/chart/values.schema.json
index 6cf6fea9f685a..0f93773dfded9 100644
--- a/chart/values.schema.json
+++ b/chart/values.schema.json
@@ -1598,6 +1598,18 @@
                 ],
                 "default": null
             },
+            "podManagementPolicy": {
+                "description": "Specifies the policy for managing pods within the worker.
Only applicable to StatefulSet.", + "type": [ + "null", + "string" + ], + "default": null, + "enum": [ + "OrderedReady", + "Parallel" + ] + }, "strategy": { "description": "Specifies the strategy used to replace old Pods by new ones when deployed as a Deployment.", "type": [ diff --git a/chart/values.yaml b/chart/values.yaml index f18d971fe68c9..9d4bcf2ca0709 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -590,6 +590,9 @@ workers: maxSurge: "100%" maxUnavailable: "50%" + # Allow relaxing ordering guarantees while preserving its uniqueness and identity + # podManagementPolicy: Parallel + # When not set, the values defined in the global securityContext will be used securityContext: {} # runAsUser: 50000 From 6c75785fac20a98651d92e8278b7052f5545ee60 Mon Sep 17 00:00:00 2001 From: Daniel Standish <15932138+dstandish@users.noreply.github.com> Date: Tue, 8 Oct 2024 08:34:43 -0700 Subject: [PATCH 021/125] Fix import sequencing of backfill and dagrun models (#42828) Previously it worked in general, but did not in certain test scenarios where we did not initialize the database. This is one solution. --- airflow/models/backfill.py | 6 ++++-- airflow/models/dagrun.py | 3 ++- tests/cli/conftest.py | 4 ++-- tests/conftest.py | 6 ------ 4 files changed, 8 insertions(+), 11 deletions(-) diff --git a/airflow/models/backfill.py b/airflow/models/backfill.py index e8c31015c27b6..37683ee6f1e55 100644 --- a/airflow/models/backfill.py +++ b/airflow/models/backfill.py @@ -32,9 +32,7 @@ from airflow.api_connexion.exceptions import Conflict, NotFound from airflow.exceptions import AirflowException -from airflow.models import DagRun from airflow.models.base import Base, StringID -from airflow.models.serialized_dag import SerializedDagModel from airflow.settings import json from airflow.utils import timezone from airflow.utils.session import create_session @@ -129,6 +127,8 @@ def _create_backfill( reverse: bool, dag_run_conf: dict | None, ) -> Backfill | None: + from airflow.models.serialized_dag import SerializedDagModel + with create_session() as session: serdag = session.get(SerializedDagModel, dag_id) if not serdag: @@ -215,6 +215,8 @@ def _cancel_backfill(backfill_id) -> Backfill: session.commit() + from airflow.models import DagRun + # now, let's mark all queued dag runs as failed query = ( update(DagRun) diff --git a/airflow/models/dagrun.py b/airflow/models/dagrun.py index 3abf16b7f9ee3..0373bc667bfc6 100644 --- a/airflow/models/dagrun.py +++ b/airflow/models/dagrun.py @@ -57,6 +57,7 @@ from airflow.listeners.listener import get_listener_manager from airflow.models import Log from airflow.models.abstractoperator import NotMapped +from airflow.models.backfill import Backfill from airflow.models.base import Base, StringID from airflow.models.expandinput import NotFullyPopulated from airflow.models.taskinstance import TaskInstance as TI @@ -207,7 +208,7 @@ class DagRun(Base, LoggingMixin): uselist=False, cascade="all, delete, delete-orphan", ) - backfill = relationship("Backfill", uselist=False) + backfill = relationship(Backfill, uselist=False) backfill_max_active_runs = association_proxy("backfill", "max_active_runs") max_active_runs = association_proxy("dag_model", "max_active_runs") diff --git a/tests/cli/conftest.py b/tests/cli/conftest.py index 9987afb6833ab..9f0a63af06978 100644 --- a/tests/cli/conftest.py +++ b/tests/cli/conftest.py @@ -21,9 +21,9 @@ import pytest -from airflow import models from airflow.cli import cli_parser from airflow.executors import local_executor +from 
airflow.models.dagbag import DagBag
 from airflow.providers.celery.executors import celery_executor, celery_kubernetes_executor
 from airflow.providers.cncf.kubernetes.executors import kubernetes_executor, local_kubernetes_executor
 from tests.test_utils.config import conf_vars
@@ -56,7 +56,7 @@ def load_examples():

 @pytest.fixture(scope="session")
 def dagbag():
-    return models.DagBag(include_examples=True)
+    return DagBag(include_examples=True)


 @pytest.fixture(scope="session")
diff --git a/tests/conftest.py b/tests/conftest.py
index d0a34a6e00524..60d009416fe8e 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -412,12 +412,6 @@ def initialize_airflow_tests(request):
             "Skipping initializing of the DB as it was initialized already.\n"
             "You can re-initialize the database by adding --with-db-init flag when running tests."
         )
-    else:
-        # if we are not initializing the database (due to skip db tests)
-        # we need to ensure Backfill is defined before DagRun
-        # otherwise we get this error:
-        # "sqlalchemy.exc.InvalidRequestError: When initializing mapper mapped class..."
-        from airflow.models.backfill import Backfill  # noqa: F401
     integration_kerberos = os.environ.get("INTEGRATION_KERBEROS")
     if integration_kerberos == "true":
         # Initialize kerberos

From c377e7fcebcbf2b2752e937d9b797864329333ef Mon Sep 17 00:00:00 2001
From: Kalyan R
Date: Tue, 8 Oct 2024 21:40:18 +0530
Subject: [PATCH 022/125] add warning log when task_key>100 (#42813)

related to #41816

Adds a warning log to indicate possible failure if the length of task_key
exceeds 100.
---
 airflow/providers/databricks/operators/databricks.py | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/airflow/providers/databricks/operators/databricks.py b/airflow/providers/databricks/operators/databricks.py
index d9256477f9495..58ffaeaece582 100644
--- a/airflow/providers/databricks/operators/databricks.py
+++ b/airflow/providers/databricks/operators/databricks.py
@@ -1045,7 +1045,15 @@ def _get_hook(self, caller: str) -> DatabricksHook:

     def _get_databricks_task_id(self, task_id: str) -> str:
         """Get the databricks task ID using dag_id and task_id. Removes illegal characters."""
-        return f"{self.dag_id}__{task_id.replace('.', '__')}"
+        task_id = f"{self.dag_id}__{task_id.replace('.', '__')}"
+        if len(task_id) > 100:
+            self.log.warning(
+                "The generated task_key '%s' exceeds 100 characters and will be truncated by the Databricks API. "
+                "This will cause failure when trying to monitor the task. task_key is generated by "
+                "concatenating dag_id and task_id.",
+                task_id,
+            )
+        return task_id

     @property
     def _databricks_workflow_task_group(self) -> DatabricksWorkflowTaskGroup | None:

From d905792e2e09a95e1c6bfe3f7a894b69a4b5c757 Mon Sep 17 00:00:00 2001
From: Ash Berlin-Taylor
Date: Tue, 8 Oct 2024 18:13:44 +0100
Subject: [PATCH 023/125] Switch `kubernetes_tests` to run with `uv run` (#42836)

This is a no-op change right now, but as part of the provider re-org in
#42505 this sets us up to be able to load the providers code in the tests.

The reason this change is done separately is that changes to breeze code
from forks don't take effect, and this small change makes it easier to land
on main without having to re-create that large PR.
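As a rough illustration only (not part of this patch), this is roughly the invocation the patched helper below ends up producing; the test paths and extra pytest args here are hypothetical, and in breeze they come from the CLI rather than being hard-coded:

```python
import subprocess
from shlex import quote

# Hypothetical inputs mirroring the helper's arguments.
the_tests = ["kubernetes_tests/"]
test_args = ["-k", "test_pod_template"]

# Same construction as the patched helper: run pytest through `uv run` so
# the project environment (including providers code) is resolved by uv.
command_to_run = " ".join(quote(arg) for arg in ["uv", "run", "pytest", *the_tests, *test_args])
subprocess.run(["bash", "--norc", "--noprofile", "-c", command_to_run], check=False)
```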
--- .../src/airflow_breeze/commands/kubernetes_commands.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py b/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py index 42161e2665ec4..c6cc343ae4aee 100644 --- a/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py @@ -1429,15 +1429,14 @@ def _run_tests( extra_shell_args.append("--no-rcs") elif shell_binary.endswith("bash"): extra_shell_args.extend(["--norc", "--noprofile"]) - the_tests: list[str] = [] - command_to_run = " ".join([quote(arg) for arg in ["pytest", *the_tests, *test_args]]) + the_tests: list[str] = ["kubernetes_tests/"] + command_to_run = " ".join([quote(arg) for arg in ["uv", "run", "pytest", *the_tests, *test_args]]) get_console(output).print(f"[info] Command to run:[/] {command_to_run}") result = run_command( [shell_binary, *extra_shell_args, "-c", command_to_run], output=output, env=env, check=False, - cwd="kubernetes_tests", ) return result.returncode, f"Tests {kubectl_cluster_name}" From 063c550405e613b2d6ea50fd04bf63506410efd6 Mon Sep 17 00:00:00 2001 From: Gopal Dirisala <39794726+dirrao@users.noreply.github.com> Date: Tue, 8 Oct 2024 23:44:42 +0530 Subject: [PATCH 024/125] CI providers discovery update when airflow min 2.7.0 (#42767) --- scripts/in_container/verify_providers.py | 18 ++++-------------- 1 file changed, 4 insertions(+), 14 deletions(-) diff --git a/scripts/in_container/verify_providers.py b/scripts/in_container/verify_providers.py index b3602dead84b4..f67334c451990 100755 --- a/scripts/in_container/verify_providers.py +++ b/scripts/in_container/verify_providers.py @@ -727,10 +727,6 @@ def verify_provider_classes() -> tuple[list[str], list[str]]: def run_provider_discovery(): - import packaging.version - - import airflow.version - console.print("[bright_blue]List all providers[/]\n") subprocess.run(["airflow", "providers", "list"], check=True) console.print("[bright_blue]List all hooks[/]\n") @@ -747,16 +743,10 @@ def run_provider_discovery(): subprocess.run(["airflow", "providers", "secrets"], check=True) console.print("[bright_blue]List all auth backends[/]\n") subprocess.run(["airflow", "providers", "auth"], check=True) - if packaging.version.parse(airflow.version.version) >= packaging.version.parse("2.7.0.dev0"): - # CI also check if our providers are installable and discoverable in airflow older versions - # But the triggers command is not available till airflow 2.7.0 - # TODO: Remove this condition once airflow dependency in providers are > 2.7.0 - console.print("[bright_blue]List all triggers[/]\n") - subprocess.run(["airflow", "providers", "triggers"], check=True) - # CI also check if our providers are installable and discoverable in airflow older versions - # But the executors command is not available till airflow 2.7.0 - console.print("[bright_blue]List all executors[/]\n") - subprocess.run(["airflow", "providers", "executors"], check=True) + console.print("[bright_blue]List all triggers[/]\n") + subprocess.run(["airflow", "providers", "triggers"], check=True) + console.print("[bright_blue]List all executors[/]\n") + subprocess.run(["airflow", "providers", "executors"], check=True) AIRFLOW_LOCAL_SETTINGS_PATH = Path("/opt/airflow") / "airflow_local_settings.py" From 3d55af546fec1a55b49d5d2812fca78105c52500 Mon Sep 17 00:00:00 2001 From: Kaxil Naik Date: Tue, 8 Oct 2024 19:19:43 +0100 Subject: [PATCH 025/125] Make Scarf details 
more prominent in 2.10 release notes (#42838)

This PR moves the Scarf entry in the release notes to the top of the 2.10.0
entries and also adds URL/IP-address info that is covered by
[Scarf's privacy policy](https://about.scarf.sh/privacy-policy).
---
 RELEASE_NOTES.rst               | 12 ++++++------
 airflow/reproducible_build.yaml |  4 ++--
 docs/apache-airflow/faq.rst     |  1 +
 3 files changed, 9 insertions(+), 8 deletions(-)

diff --git a/RELEASE_NOTES.rst b/RELEASE_NOTES.rst
index 69e666461efae..c2408bd63f504 100644
--- a/RELEASE_NOTES.rst
+++ b/RELEASE_NOTES.rst
@@ -107,6 +107,12 @@ Airflow 2.10.0 (2024-08-15)
 Significant Changes
 ^^^^^^^^^^^^^^^^^^^

+Scarf based telemetry: Airflow now collects telemetry data (#39510)
+"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+Airflow integrates Scarf to collect basic usage data during operation. Deployments can opt-out of data collection by
+setting the ``[usage_data_collection]enabled`` option to ``False``, or the ``SCARF_ANALYTICS=false`` environment variable.
+See :ref:`Usage data collection FAQ ` for more information.
+
 Datasets no longer trigger inactive DAGs (#38891)
 """""""""""""""""""""""""""""""""""""""""""""""""

@@ -155,12 +161,6 @@
 to use a specific executor that suits its needs best. A single DAG can contain tasks all using different executors. Please see the Airflow documentation for more
 details. Note: This feature is still experimental. See `documentation on Executor `_ for a more detailed description.

-Scarf based telemetry: Does Airflow collect any telemetry data? (#39510)
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-Airflow integrates Scarf to collect basic usage data during operation. Deployments can opt-out of data collection by setting the ``[usage_data_collection]enabled`` option to False, or the SCARF_ANALYTICS=false environment variable.
-See `FAQ on this `_ for more information.
-
-
 New Features
 """"""""""""
 - AIP-61 Hybrid Execution (`AIP-61 `_)
diff --git a/airflow/reproducible_build.yaml b/airflow/reproducible_build.yaml
index 8a35282492059..bbf930c9d5fa2 100644
--- a/airflow/reproducible_build.yaml
+++ b/airflow/reproducible_build.yaml
@@ -1,2 +1,2 @@
-release-notes-hash: cc9c5c2ea1cade5d714aa4832587e13a
-source-date-epoch: 1727595745
+release-notes-hash: 48b5906017b111b01aeb940a1ec904bd
+source-date-epoch: 1728409953
diff --git a/docs/apache-airflow/faq.rst b/docs/apache-airflow/faq.rst
index 0b2c76765e704..5d4aea6ddd562 100644
--- a/docs/apache-airflow/faq.rst
+++ b/docs/apache-airflow/faq.rst
@@ -548,3 +548,4 @@ The telemetry data collected is limited to the following:
 - Number of DAGs
 - Number of Airflow plugins
 - Number of timetables, Flask blueprints, Flask AppBuilder views, and Flask Appbuilder menu items from Airflow plugins
+- and information from tracking pixels like URL / IP-address as documented in the `Scarf Privacy Policy `__.
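For context on the opt-out described in this patch, here is a minimal, hypothetical sketch (not part of the patch) of disabling and verifying Scarf usage data collection; the environment-variable names follow Airflow's standard `AIRFLOW__<SECTION>__<KEY>` convention, and setting them in the deployment environment before Airflow starts is an assumption of this sketch:

```python
import os

# Opt out before the scheduler/webserver starts; either switch documented
# in the release notes above should work.
os.environ["SCARF_ANALYTICS"] = "false"
os.environ["AIRFLOW__USAGE_DATA_COLLECTION__ENABLED"] = "False"

from airflow.configuration import conf

# Read the effective setting back to confirm the opt-out took effect.
print(conf.getboolean("usage_data_collection", "enabled", fallback=True))
```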
From c57be2ffc533a3002c0c05d9045c77717c8a0e36 Mon Sep 17 00:00:00 2001 From: Benoit Perigaud <8754100+b-per@users.noreply.github.com> Date: Tue, 8 Oct 2024 20:31:25 +0200 Subject: [PATCH 026/125] Add ability to provide proxy for dbt Cloud connection (#42737) * Add ability to provide proxy for dbt Cloud connection * Running pre-commit checks * Update current tests and add new test with proxy --- airflow/providers/dbt/cloud/hooks/dbt.py | 56 +++++++---- .../connections.rst | 14 +++ tests/providers/dbt/cloud/hooks/test_dbt.py | 95 ++++++++++++++++--- 3 files changed, 134 insertions(+), 31 deletions(-) diff --git a/airflow/providers/dbt/cloud/hooks/dbt.py b/airflow/providers/dbt/cloud/hooks/dbt.py index acffb477165c6..b13e1003b9c6f 100644 --- a/airflow/providers/dbt/cloud/hooks/dbt.py +++ b/airflow/providers/dbt/cloud/hooks/dbt.py @@ -26,7 +26,6 @@ from typing import TYPE_CHECKING, Any, Callable, Sequence, Set, TypeVar, cast import aiohttp -from aiohttp import ClientResponseError from asgiref.sync import sync_to_async from requests.auth import AuthBase from requests.sessions import Session @@ -182,9 +181,12 @@ class DbtCloudHook(HttpHook): def get_ui_field_behaviour(cls) -> dict[str, Any]: """Build custom field behavior for the dbt Cloud connection form in the Airflow UI.""" return { - "hidden_fields": ["schema", "port", "extra"], + "hidden_fields": ["schema", "port"], "relabeling": {"login": "Account ID", "password": "API Token", "host": "Tenant"}, - "placeholders": {"host": "Defaults to 'cloud.getdbt.com'."}, + "placeholders": { + "host": "Defaults to 'cloud.getdbt.com'.", + "extra": "Optional JSON-formatted extra.", + }, } def __init__(self, dbt_cloud_conn_id: str = default_conn_name, *args, **kwargs) -> None: @@ -195,6 +197,10 @@ def __init__(self, dbt_cloud_conn_id: str = default_conn_name, *args, **kwargs) def _get_tenant_domain(conn: Connection) -> str: return conn.host or "cloud.getdbt.com" + @staticmethod + def _get_proxies(conn: Connection) -> dict[str, str] | None: + return conn.extra_dejson.get("proxies", None) + @staticmethod def get_request_url_params( tenant: str, endpoint: str, include_related: list[str] | None = None, *, api_version: str = "v2" @@ -238,14 +244,26 @@ async def get_job_details( endpoint = f"{account_id}/runs/{run_id}/" headers, tenant = await self.get_headers_tenants_from_connection() url, params = self.get_request_url_params(tenant, endpoint, include_related) - async with aiohttp.ClientSession(headers=headers) as session, session.get( - url, params=params - ) as response: - try: - response.raise_for_status() - return await response.json() - except ClientResponseError as e: - raise AirflowException(f"{e.status}:{e.message}") + proxies = self._get_proxies(self.connection) + async with aiohttp.ClientSession(headers=headers) as session: + if proxies is not None: + if url.startswith("https"): + proxy = proxies.get("https") + else: + proxy = proxies.get("http") + async with session.get(url, params=params, proxy=proxy) as response: + try: + response.raise_for_status() + return await response.json() + except aiohttp.ClientResponseError as e: + raise AirflowException(f"{e.status}:{e.message}") + else: + async with session.get(url, params=params) as response: + try: + response.raise_for_status() + return await response.json() + except aiohttp.ClientResponseError as e: + raise AirflowException(f"{e.status}:{e.message}") async def get_job_status( self, run_id: int, account_id: int | None = None, include_related: list[str] | None = None @@ -280,8 +298,11 @@ def 
get_conn(self, *args, **kwargs) -> Session:

         return session

-    def _paginate(self, endpoint: str, payload: dict[str, Any] | None = None) -> list[Response]:
-        response = self.run(endpoint=endpoint, data=payload)
+    def _paginate(
+        self, endpoint: str, payload: dict[str, Any] | None = None, proxies: dict[str, str] | None = None
+    ) -> list[Response]:
+        extra_options = {"proxies": proxies} if proxies is not None else None
+        response = self.run(endpoint=endpoint, data=payload, extra_options=extra_options)
         resp_json = response.json()
         limit = resp_json["extra"]["filters"]["limit"]
         num_total_results = resp_json["extra"]["pagination"]["total_count"]
@@ -292,7 +313,7 @@
             _paginate_payload["offset"] = limit

             while num_current_results < num_total_results:
-                response = self.run(endpoint=endpoint, data=_paginate_payload)
+                response = self.run(endpoint=endpoint, data=_paginate_payload, extra_options=extra_options)
                 resp_json = response.json()
                 results.append(response)
                 num_current_results += resp_json["extra"]["pagination"]["count"]
@@ -310,17 +331,20 @@ def _run_and_get_response(
     ) -> Any:
         self.method = method
         full_endpoint = f"api/{api_version}/accounts/{endpoint}" if endpoint else None
+        proxies = self._get_proxies(self.connection)
+        extra_options = {"proxies": proxies} if proxies is not None else None

         if paginate:
             if isinstance(payload, str):
                 raise ValueError("Payload cannot be a string to paginate a response.")

             if full_endpoint:
-                return self._paginate(endpoint=full_endpoint, payload=payload)
+                return self._paginate(endpoint=full_endpoint, payload=payload, proxies=proxies)

             raise ValueError("An endpoint is needed to paginate a response.")

-        return self.run(endpoint=full_endpoint, data=payload)
+        return self.run(endpoint=full_endpoint, data=payload, extra_options=extra_options)

     def list_accounts(self) -> list[Response]:
         """
diff --git a/docs/apache-airflow-providers-dbt-cloud/connections.rst b/docs/apache-airflow-providers-dbt-cloud/connections.rst
index f3514cc83a164..428c15d12ef8d 100644
--- a/docs/apache-airflow-providers-dbt-cloud/connections.rst
+++ b/docs/apache-airflow-providers-dbt-cloud/connections.rst
@@ -77,6 +77,20 @@ Host (optional)
     If using the Connection form in the Airflow UI, the Tenant domain can also be stored in the "Tenant"
     field.

+Extra (optional)
+    Specify extra parameters as a JSON dictionary. As of now, only `proxies` is supported, for connecting to dbt Cloud via a proxy.
+
+    `proxies` should be a dictionary of proxies to be used by HTTP and HTTPS connections.
+
+.. code-block:: json
+
+    {
+        "proxies": {
+            "http": "http://myproxy.mycompany.local:8080",
+            "https": "http://myproxy.mycompany.local:8080"
+        }
+    }
+
 When specifying the connection as an environment variable, you should specify it following the standard syntax of a database connection.
 Note that all components of the URI should be URL-encoded.
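To make the new `proxies` extra concrete, here is a minimal sketch (not part of this patch) of defining such a connection through the standard `AIRFLOW_CONN_` environment-variable mechanism; the account ID, token, and proxy URL are placeholders, and the `dbt_cloud` connection type is assumed to match `DbtCloudHook.conn_type`:

```python
import json
import os

from airflow.models.connection import Connection

conn = Connection(
    conn_id="dbt_cloud_default",
    conn_type="dbt_cloud",
    login="11111",  # dbt Cloud account ID (placeholder)
    password="my-api-token",  # dbt Cloud API token (placeholder)
    host="cloud.getdbt.com",  # tenant domain
    extra=json.dumps(
        {
            "proxies": {
                "http": "http://myproxy.mycompany.local:8080",
                "https": "http://myproxy.mycompany.local:8080",
            }
        }
    ),
)

# Expose the connection through the environment so hooks can resolve it.
os.environ[f"AIRFLOW_CONN_{conn.conn_id.upper()}"] = conn.get_uri()
```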
diff --git a/tests/providers/dbt/cloud/hooks/test_dbt.py b/tests/providers/dbt/cloud/hooks/test_dbt.py index 71ef75ba31de7..0d84189bc8049 100644 --- a/tests/providers/dbt/cloud/hooks/test_dbt.py +++ b/tests/providers/dbt/cloud/hooks/test_dbt.py @@ -40,9 +40,11 @@ ACCOUNT_ID_CONN = "account_id_conn" NO_ACCOUNT_ID_CONN = "no_account_id_conn" SINGLE_TENANT_CONN = "single_tenant_conn" +PROXY_CONN = "proxy_conn" DEFAULT_ACCOUNT_ID = 11111 ACCOUNT_ID = 22222 SINGLE_TENANT_DOMAIN = "single.tenant.getdbt.com" +EXTRA_PROXIES = {"proxies": {"https": "http://myproxy:1234"}} TOKEN = "token" PROJECT_ID = 33333 JOB_ID = 4444 @@ -136,9 +138,20 @@ def setup_class(self): host=SINGLE_TENANT_DOMAIN, ) + # Connection with a proxy set in extra parameters + proxy_conn = Connection( + conn_id=PROXY_CONN, + conn_type=DbtCloudHook.conn_type, + login=DEFAULT_ACCOUNT_ID, + password=TOKEN, + host=SINGLE_TENANT_DOMAIN, + extra=EXTRA_PROXIES, + ) + db.merge_conn(account_id_conn) db.merge_conn(no_account_id_conn) db.merge_conn(host_conn) + db.merge_conn(proxy_conn) @pytest.mark.parametrize( argnames="conn_id, url", @@ -196,7 +209,7 @@ def test_list_accounts(self, mock_http_run, mock_paginate, conn_id, account_id): hook.list_accounts() assert hook.method == "GET" - hook.run.assert_called_once_with(endpoint=None, data=None) + hook.run.assert_called_once_with(endpoint=None, data=None, extra_options=None) hook._paginate.assert_not_called() @pytest.mark.parametrize( @@ -213,7 +226,9 @@ def test_get_account(self, mock_http_run, mock_paginate, conn_id, account_id): assert hook.method == "GET" _account_id = account_id or DEFAULT_ACCOUNT_ID - hook.run.assert_called_once_with(endpoint=f"api/v2/accounts/{_account_id}/", data=None) + hook.run.assert_called_once_with( + endpoint=f"api/v2/accounts/{_account_id}/", data=None, extra_options=None + ) hook._paginate.assert_not_called() @pytest.mark.parametrize( @@ -232,7 +247,7 @@ def test_list_projects(self, mock_http_run, mock_paginate, conn_id, account_id): _account_id = account_id or DEFAULT_ACCOUNT_ID hook.run.assert_not_called() hook._paginate.assert_called_once_with( - endpoint=f"api/v3/accounts/{_account_id}/projects/", payload=None + endpoint=f"api/v3/accounts/{_account_id}/projects/", payload=None, proxies=None ) @pytest.mark.parametrize( @@ -250,7 +265,7 @@ def test_get_project(self, mock_http_run, mock_paginate, conn_id, account_id): _account_id = account_id or DEFAULT_ACCOUNT_ID hook.run.assert_called_once_with( - endpoint=f"api/v3/accounts/{_account_id}/projects/{PROJECT_ID}/", data=None + endpoint=f"api/v3/accounts/{_account_id}/projects/{PROJECT_ID}/", data=None, extra_options=None ) hook._paginate.assert_not_called() @@ -269,7 +284,9 @@ def test_list_jobs(self, mock_http_run, mock_paginate, conn_id, account_id): _account_id = account_id or DEFAULT_ACCOUNT_ID hook._paginate.assert_called_once_with( - endpoint=f"api/v2/accounts/{_account_id}/jobs/", payload={"order_by": None, "project_id": None} + endpoint=f"api/v2/accounts/{_account_id}/jobs/", + payload={"order_by": None, "project_id": None}, + proxies=None, ) hook.run.assert_not_called() @@ -290,6 +307,7 @@ def test_list_jobs_with_payload(self, mock_http_run, mock_paginate, conn_id, acc hook._paginate.assert_called_once_with( endpoint=f"api/v2/accounts/{_account_id}/jobs/", payload={"order_by": "-id", "project_id": PROJECT_ID}, + proxies=None, ) hook.run.assert_not_called() @@ -307,7 +325,9 @@ def test_get_job(self, mock_http_run, mock_paginate, conn_id, account_id): assert hook.method == "GET" _account_id = account_id 
or DEFAULT_ACCOUNT_ID - hook.run.assert_called_once_with(endpoint=f"api/v2/accounts/{_account_id}/jobs/{JOB_ID}", data=None) + hook.run.assert_called_once_with( + endpoint=f"api/v2/accounts/{_account_id}/jobs/{JOB_ID}", data=None, extra_options=None + ) hook._paginate.assert_not_called() @pytest.mark.parametrize( @@ -328,6 +348,7 @@ def test_trigger_job_run(self, mock_http_run, mock_paginate, conn_id, account_id hook.run.assert_called_once_with( endpoint=f"api/v2/accounts/{_account_id}/jobs/{JOB_ID}/run/", data=json.dumps({"cause": cause, "steps_override": None, "schema_override": None}), + extra_options=None, ) hook._paginate.assert_not_called() @@ -359,6 +380,7 @@ def test_trigger_job_run_with_overrides(self, mock_http_run, mock_paginate, conn data=json.dumps( {"cause": cause, "steps_override": steps_override, "schema_override": schema_override} ), + extra_options=None, ) hook._paginate.assert_not_called() @@ -393,6 +415,7 @@ def test_trigger_job_run_with_additional_run_configs( "generate_docs_override": False, } ), + extra_options=None, ) hook._paginate.assert_not_called() @@ -422,6 +445,7 @@ def test_trigger_job_run_with_longer_cause(self, mock_http_run, mock_paginate, c hook.run.assert_called_once_with( endpoint=f"api/v2/accounts/{_account_id}/jobs/{JOB_ID}/run/", data=json.dumps({"cause": expected_cause, "steps_override": None, "schema_override": None}), + extra_options=None, ) hook._paginate.assert_not_called() @@ -467,7 +491,9 @@ def test_trigger_job_run_with_retry_from_failure( hook._paginate.assert_not_called() if should_use_rerun: hook.run.assert_called_once_with( - endpoint=f"api/v2/accounts/{_account_id}/jobs/{JOB_ID}/rerun/", data=None + endpoint=f"api/v2/accounts/{_account_id}/jobs/{JOB_ID}/rerun/", + data=None, + extra_options=None, ) else: hook.run.assert_called_once_with( @@ -479,8 +505,31 @@ def test_trigger_job_run_with_retry_from_failure( "schema_override": None, } ), + extra_options=None, ) + @pytest.mark.parametrize( + argnames="conn_id, account_id", + argvalues=[(PROXY_CONN, ACCOUNT_ID)], + ids=["proxy_connection"], + ) + @patch.object(DbtCloudHook, "run") + @patch.object(DbtCloudHook, "_paginate") + def test_trigger_job_run_with_proxy(self, mock_http_run, mock_paginate, conn_id, account_id): + hook = DbtCloudHook(conn_id) + cause = "" + hook.trigger_job_run(job_id=JOB_ID, cause=cause, account_id=account_id) + + assert hook.method == "POST" + + _account_id = account_id or DEFAULT_ACCOUNT_ID + hook.run.assert_called_once_with( + endpoint=f"api/v2/accounts/{_account_id}/jobs/{JOB_ID}/run/", + data=json.dumps({"cause": cause, "steps_override": None, "schema_override": None}), + extra_options={"proxies": {"https": "http://myproxy:1234"}}, + ) + hook._paginate.assert_not_called() + @pytest.mark.parametrize( argnames="conn_id, account_id", argvalues=[(ACCOUNT_ID_CONN, None), (NO_ACCOUNT_ID_CONN, ACCOUNT_ID)], @@ -503,6 +552,7 @@ def test_list_job_runs(self, mock_http_run, mock_paginate, conn_id, account_id): "job_definition_id": None, "order_by": None, }, + proxies=None, ) @pytest.mark.parametrize( @@ -529,6 +579,7 @@ def test_list_job_runs_with_payload(self, mock_http_run, mock_paginate, conn_id, "job_definition_id": JOB_ID, "order_by": "id", }, + proxies=None, ) @pytest.mark.parametrize( @@ -544,7 +595,9 @@ def test_get_job_runs(self, mock_http_run, conn_id, account_id): assert hook.method == "GET" _account_id = account_id or DEFAULT_ACCOUNT_ID - hook.run.assert_called_once_with(endpoint=f"api/v2/accounts/{_account_id}/runs/", data=None) + 
hook.run.assert_called_once_with(
+            endpoint=f"api/v2/accounts/{_account_id}/runs/", data=None, extra_options=None
+        )

     @pytest.mark.parametrize(
         argnames="conn_id, account_id",
@@ -561,7 +614,9 @@ def test_get_job_run(self, mock_http_run, mock_paginate, conn_id, account_id):

         _account_id = account_id or DEFAULT_ACCOUNT_ID
         hook.run.assert_called_once_with(
-            endpoint=f"api/v2/accounts/{_account_id}/runs/{RUN_ID}/", data={"include_related": None}
+            endpoint=f"api/v2/accounts/{_account_id}/runs/{RUN_ID}/",
+            data={"include_related": None},
+            extra_options=None,
         )
         hook._paginate.assert_not_called()

@@ -580,7 +635,9 @@ def test_get_job_run_with_payload(self, mock_http_run, mock_paginate, conn_id, a

         _account_id = account_id or DEFAULT_ACCOUNT_ID
         hook.run.assert_called_once_with(
-            endpoint=f"api/v2/accounts/{_account_id}/runs/{RUN_ID}/", data={"include_related": ["triggers"]}
+            endpoint=f"api/v2/accounts/{_account_id}/runs/{RUN_ID}/",
+            data={"include_related": ["triggers"]},
+            extra_options=None,
         )
         hook._paginate.assert_not_called()

@@ -645,7 +702,7 @@ def test_cancel_job_run(self, mock_http_run, mock_paginate, conn_id, account_id)

         _account_id = account_id or DEFAULT_ACCOUNT_ID
         hook.run.assert_called_once_with(
-            endpoint=f"api/v2/accounts/{_account_id}/runs/{RUN_ID}/cancel/", data=None
+            endpoint=f"api/v2/accounts/{_account_id}/runs/{RUN_ID}/cancel/", data=None, extra_options=None
         )
         hook._paginate.assert_not_called()

@@ -664,7 +721,9 @@ def test_list_job_run_artifacts(self, mock_http_run, mock_paginate, conn_id, acc

         _account_id = account_id or DEFAULT_ACCOUNT_ID
         hook.run.assert_called_once_with(
-            endpoint=f"api/v2/accounts/{_account_id}/runs/{RUN_ID}/artifacts/", data={"step": None}
+            endpoint=f"api/v2/accounts/{_account_id}/runs/{RUN_ID}/artifacts/",
+            data={"step": None},
+            extra_options=None,
         )
         hook._paginate.assert_not_called()

@@ -683,7 +742,9 @@ def test_list_job_run_artifacts_with_payload(self, mock_http_run, mock_paginate,

         _account_id = account_id or DEFAULT_ACCOUNT_ID
         hook.run.assert_called_once_with(
-            endpoint=f"api/v2/accounts/{_account_id}/runs/{RUN_ID}/artifacts/", data={"step": 2}
+            endpoint=f"api/v2/accounts/{_account_id}/runs/{RUN_ID}/artifacts/",
+            data={"step": 2},
+            extra_options=None,
         )
         hook._paginate.assert_not_called()

@@ -703,7 +764,9 @@ def test_get_job_run_artifact(self, mock_http_run, mock_paginate, conn_id, accou

         _account_id = account_id or DEFAULT_ACCOUNT_ID
         hook.run.assert_called_once_with(
-            endpoint=f"api/v2/accounts/{_account_id}/runs/{RUN_ID}/artifacts/{path}", data={"step": None}
+            endpoint=f"api/v2/accounts/{_account_id}/runs/{RUN_ID}/artifacts/{path}",
+            data={"step": None},
+            extra_options=None,
         )
         hook._paginate.assert_not_called()

@@ -723,7 +786,9 @@ def test_get_job_run_artifact_with_payload(self, mock_http_run, mock_paginate, c

         _account_id = account_id or DEFAULT_ACCOUNT_ID
         hook.run.assert_called_once_with(
-            endpoint=f"api/v2/accounts/{_account_id}/runs/{RUN_ID}/artifacts/{path}", data={"step": 2}
+            endpoint=f"api/v2/accounts/{_account_id}/runs/{RUN_ID}/artifacts/{path}",
+            data={"step": 2},
+            extra_options=None,
         )
         hook._paginate.assert_not_called()

From 5be7b21cbc98d91a2fa29a47f9567b1ebb235c86 Mon Sep 17 00:00:00 2001
From: Kaxil Naik
Date: Tue, 8 Oct 2024 19:57:53 +0100
Subject: [PATCH 027/125] Update CODEOWNERS to update Kaxil's entry (#42839)

I haven't been that active coding or reviewing lately. I am now returning to
focus on the Task SDK (AIP-72), so I am narrowing down my entries in
CODEOWNERS.
--- .github/CODEOWNERS | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 4de511fdfafc2..c20fe916f92d4 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,10 +1,10 @@ # Core -/airflow/executors/ @kaxil @XD-DENG @ashb @o-nikolas @pierrejeambrun @hussein-awala -/airflow/jobs/ @kaxil @ashb @XD-DENG -/airflow/models/ @kaxil @XD-DENG @ashb +/airflow/executors/ @XD-DENG @ashb @o-nikolas @pierrejeambrun @hussein-awala +/airflow/jobs/ @ashb @XD-DENG +/airflow/models/ @XD-DENG @ashb # DAG Serialization -/airflow/serialization/ @kaxil @ashb @bolkedebruin +/airflow/serialization/ @ashb @bolkedebruin # DAG Parsing /airflow/dag_processing @jedcunningham @ephraimbuddy @@ -17,7 +17,7 @@ /chart/ @dstandish @jedcunningham @hussein-awala # Docs (without Providers) -/docs/*.py @kaxil @potiuk +/docs/*.py @potiuk /docs/apache-airflow @potiuk /docs/docker-stack @potiuk /docs/helm-chart @dstandish @jedcunningham @@ -58,7 +58,7 @@ /docs/apache-airflow/concepts/deferring.rst @dstandish @hussein-awala # Secrets Backends -/airflow/secrets @dstandish @kaxil @potiuk @ashb +/airflow/secrets @dstandish @potiuk @ashb # Providers /airflow/providers/amazon/ @eladkal @o-nikolas @@ -85,7 +85,7 @@ /tests/system/providers/amazon/ @eladkal @o-nikolas # Dev tools -/.github/workflows/ @potiuk @ashb @kaxil +/.github/workflows/ @potiuk @ashb /dev/ @potiuk @ashb @jedcunningham /docker_tests/ @potiuk @ashb /provider_packages/ @potiuk @ashb @@ -95,7 +95,7 @@ Dockerfile.ci @potiuk @ashb # Releasing Guides & Project Guidelines /dev/PROJECT_GUIDELINES.md @kaxil -/dev/PROVIDER_PACKAGE_DETAILS.md @kaxil +/dev/PROVIDER_PACKAGE_DETAILS.md @eladkal /dev/README.md @kaxil /dev/README_RELEASE_*.md @kaxil @pierrejeambrun /dev/README_RELEASE_PROVIDER_PACKAGES.md @eladkal From 5a146f75c5d2910156c04425912fb86e327fcf5e Mon Sep 17 00:00:00 2001 From: Guillaume Lostis Date: Tue, 8 Oct 2024 19:05:48 +0000 Subject: [PATCH 028/125] Remove unused ldap3 dependency (#42829) --- hatch_build.py | 1 - 1 file changed, 1 deletion(-) diff --git a/hatch_build.py b/hatch_build.py index 53c2bb4fa5236..72f59c79ef9ff 100644 --- a/hatch_build.py +++ b/hatch_build.py @@ -97,7 +97,6 @@ "thrift-sasl>=0.2.0", ], "ldap": [ - "ldap3>=2.5.1", "python-ldap", ], "leveldb": [ From b98c620a387af8c904732608975c49f83fca185c Mon Sep 17 00:00:00 2001 From: Zach Liu Date: Tue, 8 Oct 2024 15:37:25 -0400 Subject: [PATCH 029/125] Improve docs on adding Google secrets in AWS Secrets Manager (#42832) The [existing documentation](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/secrets-backends/aws-secrets-manager.html#example-of-storing-google-secrets-in-aws-secrets-manager) on how to set up Google secrets in AWS Secrets Manager is out of date. It led me on a merry chase for hours. I hereby submit this PR to update the doc. 
I solemnly swear that the content has been verified using DAG code similar to this:

```python
from airflow.providers.google.suite.hooks.sheets import GSheetsHook

gsheet = GSheetsHook(gcp_conn_id=gcp_conn_id)
values = gsheet.get_values(
    spreadsheet_id=spreadsheet_id,
    range_=f"{sheet_name}!B1:B2",
)
```
---
 .../img/aws-secrets-manager-gcp.png           | Bin 0 -> 13453 bytes
 .../secrets-backends/aws-secrets-manager.rst  |  20 ++++++++++++++----
 2 files changed, 16 insertions(+), 4 deletions(-)
 create mode 100644 docs/apache-airflow-providers-amazon/img/aws-secrets-manager-gcp.png

diff --git a/docs/apache-airflow-providers-amazon/img/aws-secrets-manager-gcp.png b/docs/apache-airflow-providers-amazon/img/aws-secrets-manager-gcp.png
new file mode 100644
index 0000000000000000000000000000000000000000..abaaba79fb211fbd101779cc3b34d57b0f5ba4f3
GIT binary patch
[13453 bytes of base85-encoded PNG data for aws-secrets-manager-gcp.png elided]
[diff hunk for docs/apache-airflow-providers-amazon/secrets-backends/aws-secrets-manager.rst truncated; surviving fragment:]
+    "scope": "https://www.googleapis.com/auth/devstorage.read_only"}}
+
+Either way you can edit the ``Key/value`` pairs directly on the UI
+
+.. image:: /img/aws-secrets-manager-gcp.png

From 3b4c73a644f40013927fb72227dfe018dc47a97f Mon Sep 17 00:00:00 2001
From: Vincent <97131062+vincbeck@users.noreply.github.com>
Date: Tue, 8 Oct 2024 16:33:08 -0400
Subject: [PATCH 030/125] Remove `sqlalchemy-redshift` dependency from Amazon provider (#42830)

---
 airflow/providers/amazon/provider.yaml         | 1 -
 docs/apache-airflow-providers-amazon/index.rst | 1 -
 generated/provider_dependencies.json           | 1 -
 3 files changed, 3 deletions(-)

diff --git a/airflow/providers/amazon/provider.yaml b/airflow/providers/amazon/provider.yaml
index 1316cd05231a0..741efb70e7e51 100644
--- a/airflow/providers/amazon/provider.yaml
+++ b/airflow/providers/amazon/provider.yaml
@@ -105,7 +105,6 @@ dependencies:
   - watchtower>=3.0.0,!=3.3.0,<4
   - jsonpath_ng>=1.5.3
   - redshift_connector>=2.0.918
-  - sqlalchemy_redshift>=0.8.6
   - asgiref>=2.3.0
   - PyAthena>=3.0.10
   - jmespath>=0.7.0
diff --git a/docs/apache-airflow-providers-amazon/index.rst b/docs/apache-airflow-providers-amazon/index.rst
index c58847dcd8876..88e869bba8016 100644
--- a/docs/apache-airflow-providers-amazon/index.rst
+++ b/docs/apache-airflow-providers-amazon/index.rst
@@ -119,7 +119,6 @@ PIP package Version required
 ``watchtower``         ``>=3.0.0,!=3.3.0,<4``
 ``jsonpath_ng``        ``>=1.5.3``
 ``redshift_connector`` ``>=2.0.918``
-``sqlalchemy_redshift`` ``>=0.8.6``
 ``asgiref``            ``>=2.3.0``
 ``PyAthena``           ``>=3.0.10``
 ``jmespath``           ``>=0.7.0``
diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json
index e6a0deca23cbf..b1ff0f7dcfab7 100644
--- a/generated/provider_dependencies.json
+++ b/generated/provider_dependencies.json
@@ -38,7 +38,6 @@
         "jsonpath_ng>=1.5.3",
         "python3-saml>=1.16.0",
         "redshift_connector>=2.0.918",
-        "sqlalchemy_redshift>=0.8.6",
         "watchtower>=3.0.0,!=3.3.0,<4"
     ],
     "devel-deps": [

From 04ba8ee30095adbf8a70373b96739b02aad9b1fb Mon Sep 17 00:00:00 2001
From: Kaxil Naik
Date: Tue, 8 Oct 2024 22:54:58 +0100
Subject: [PATCH 031/125] Simplify code for recent dbt provider change (#42840)

Just simplified redundant code from https://github.com/apache/airflow/pull/42737

---
 airflow/providers/dbt/cloud/hooks/dbt.py | 30 +++++++++---------------
 1 file changed, 11 insertions(+), 19 deletions(-)

diff --git a/airflow/providers/dbt/cloud/hooks/dbt.py b/airflow/providers/dbt/cloud/hooks/dbt.py
index b13e1003b9c6f..4007054be69c2 100644
--- a/airflow/providers/dbt/cloud/hooks/dbt.py
+++ b/airflow/providers/dbt/cloud/hooks/dbt.py
@@ -244,26 +244,18 @@ async def get_job_details(
         endpoint = f"{account_id}/runs/{run_id}/"
         headers, tenant = await self.get_headers_tenants_from_connection()
         url, params = self.get_request_url_params(tenant, endpoint, include_related)
-        proxies = self._get_proxies(self.connection)
+        proxies = self._get_proxies(self.connection) or {}
+
         async with aiohttp.ClientSession(headers=headers) as session:
-            if proxies is not None:
-                if url.startswith("https"):
-                    proxy = proxies.get("https")
-                else:
-                    proxy = proxies.get("http")
-                async with session.get(url, params=params, proxy=proxy) as response:
-                    try:
-                        response.raise_for_status()
-                        return await response.json()
-                    except aiohttp.ClientResponseError as e:
-                        raise AirflowException(f"{e.status}:{e.message}")
-            else:
-                async with session.get(url, params=params) as response:
-                    try:
-                        response.raise_for_status()
-                        return await response.json()
- except aiohttp.ClientResponseError as e: - raise AirflowException(f"{e.status}:{e.message}") + proxy = proxies.get("https") if proxies and url.startswith("https") else proxies.get("http") + extra_request_args = {} + + if proxy: + extra_request_args["proxy"] = proxy + + async with session.get(url, params=params, **extra_request_args) as response: # type: ignore[arg-type] + response.raise_for_status() + return await response.json() async def get_job_status( self, run_id: int, account_id: int | None = None, include_related: list[str] | None = None From f1664674d859a262e93fb3110557a1e71138ca8b Mon Sep 17 00:00:00 2001 From: harjeevan maan Date: Tue, 8 Oct 2024 20:09:26 -0400 Subject: [PATCH 032/125] - Fixes #42432 (#42770) - Added a unit test --- airflow/utils/db.py | 23 +++++++++++++---------- tests/utils/test_db.py | 9 +++++++++ 2 files changed, 22 insertions(+), 10 deletions(-) diff --git a/airflow/utils/db.py b/airflow/utils/db.py index c185c70a9811f..fde641fa9b424 100644 --- a/airflow/utils/db.py +++ b/airflow/utils/db.py @@ -1203,19 +1203,22 @@ def resetdb(session: Session = NEW_SESSION, skip_init: bool = False): if not settings.engine: raise RuntimeError("The settings.engine must be set. This is a critical assertion") log.info("Dropping tables that exist") + original_logging_level = logging.root.level + try: + import_all_models() - import_all_models() - - connection = settings.engine.connect() + connection = settings.engine.connect() - with create_global_lock(session=session, lock=DBLocks.MIGRATIONS), connection.begin(): - drop_airflow_models(connection) - drop_airflow_moved_tables(connection) - external_db_manager = RunDBManager() - external_db_manager.drop_tables(session, connection) + with create_global_lock(session=session, lock=DBLocks.MIGRATIONS), connection.begin(): + drop_airflow_models(connection) + drop_airflow_moved_tables(connection) + external_db_manager = RunDBManager() + external_db_manager.drop_tables(session, connection) - if not skip_init: - initdb(session=session) + if not skip_init: + initdb(session=session) + finally: + logging.root.setLevel(original_logging_level) @provide_session diff --git a/tests/utils/test_db.py b/tests/utils/test_db.py index 2a197c2e6cf72..ce77f80297fcf 100644 --- a/tests/utils/test_db.py +++ b/tests/utils/test_db.py @@ -18,6 +18,7 @@ from __future__ import annotations import inspect +import logging import os import re from contextlib import redirect_stdout @@ -229,6 +230,14 @@ def test_resetdb( else: mock_init.assert_called_once_with(session=session_mock) + def test_resetdb_logging_level(self): + unset_logging_level = logging.root.level + logging.root.setLevel(logging.DEBUG) + set_logging_level = logging.root.level + resetdb() + assert logging.root.level == set_logging_level + assert logging.root.level != unset_logging_level + def test_alembic_configuration(self): with mock.patch.dict( os.environ, {"AIRFLOW__DATABASE__ALEMBIC_INI_FILE_PATH": "/tmp/alembic.ini"}, clear=True From 66a89f1bd471cc7038031109dbff3769504f1c71 Mon Sep 17 00:00:00 2001 From: Bugra Ozturk Date: Wed, 9 Oct 2024 10:09:40 +0200 Subject: [PATCH 033/125] AIP-84 Migrate views /object/historical_metrics_data to Fast API (#42629) * Include object router and migrate objects/historical_metrics_data to FastAPI * Use pyfixture session * Include provide_session for SQLite session thread issue * Make method async and remove unused session from tests * Include return type to generate proper openapi spec * Object definition to Dashboard definition, make safe date Annotated parameter 
in parameters.py * Include pydantic model for Dashboard Historical Metric Data and convert response to HistoricalMetricDataResponse * Fix conflict and rerun pre-commit hooks * Rename data variable to historical metrics for consistency * Include object router and migrate objects/historical_metrics_data to FastAPI * Object definition to Dashboard definition, make safe date Annotated parameter in parameters.py * Drop data from method name to prevent double Data in the name of objects * Variable name change to prevent shadow naming with method name * Resolve conflicts and rebase again * Resolve conflicts and rebase --- airflow/api_fastapi/openapi/v1-generated.yaml | 156 ++++++++++++++++++ airflow/api_fastapi/parameters.py | 22 +++ airflow/api_fastapi/serializers/dashboard.py | 63 +++++++ airflow/api_fastapi/views/ui/__init__.py | 2 + airflow/api_fastapi/views/ui/dashboard.py | 100 +++++++++++ airflow/ui/openapi-gen/queries/common.ts | 23 +++ airflow/ui/openapi-gen/queries/prefetch.ts | 27 +++ airflow/ui/openapi-gen/queries/queries.ts | 38 ++++- airflow/ui/openapi-gen/queries/suspense.ts | 34 ++++ .../ui/openapi-gen/requests/schemas.gen.ts | 143 ++++++++++++++++ .../ui/openapi-gen/requests/services.gen.ts | 30 ++++ airflow/ui/openapi-gen/requests/types.gen.ts | 74 +++++++++ airflow/www/views.py | 1 + tests/api_fastapi/views/ui/test_dashboard.py | 151 +++++++++++++++++ 14 files changed, 862 insertions(+), 2 deletions(-) create mode 100644 airflow/api_fastapi/serializers/dashboard.py create mode 100644 airflow/api_fastapi/views/ui/dashboard.py create mode 100644 tests/api_fastapi/views/ui/test_dashboard.py diff --git a/airflow/api_fastapi/openapi/v1-generated.yaml b/airflow/api_fastapi/openapi/v1-generated.yaml index f3bc8612bfa76..28723b800879e 100644 --- a/airflow/api_fastapi/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/openapi/v1-generated.yaml @@ -34,6 +34,45 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + /ui/dashboard/historical_metrics_data: + get: + tags: + - Dashboard + summary: Historical Metrics + description: Return cluster activity historical metrics. + operationId: historical_metrics + parameters: + - name: start_date + in: query + required: true + schema: + type: string + title: Start Date + - name: end_date + in: query + required: true + schema: + type: string + title: End Date + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/HistoricalMetricDataResponse' + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' /public/dags/: get: tags: @@ -964,6 +1003,50 @@ components: - file_token title: DAGResponse description: DAG serializer for responses. + DAGRunStates: + properties: + queued: + type: integer + title: Queued + running: + type: integer + title: Running + success: + type: integer + title: Success + failed: + type: integer + title: Failed + type: object + required: + - queued + - running + - success + - failed + title: DAGRunStates + description: DAG Run States for responses. 
+ DAGRunTypes: + properties: + backfill: + type: integer + title: Backfill + scheduled: + type: integer + title: Scheduled + manual: + type: integer + title: Manual + dataset_triggered: + type: integer + title: Dataset Triggered + type: object + required: + - backfill + - scheduled + - manual + - dataset_triggered + title: DAGRunTypes + description: DAG Run Types for responses. DagRunState: type: string enum: @@ -1016,6 +1099,79 @@ components: title: Detail type: object title: HTTPValidationError + HistoricalMetricDataResponse: + properties: + dag_run_types: + $ref: '#/components/schemas/DAGRunTypes' + dag_run_states: + $ref: '#/components/schemas/DAGRunStates' + task_instance_states: + $ref: '#/components/schemas/TaskInstantState' + type: object + required: + - dag_run_types + - dag_run_states + - task_instance_states + title: HistoricalMetricDataResponse + description: Historical Metric Data serializer for responses. + TaskInstantState: + properties: + no_status: + type: integer + title: No Status + removed: + type: integer + title: Removed + scheduled: + type: integer + title: Scheduled + queued: + type: integer + title: Queued + running: + type: integer + title: Running + success: + type: integer + title: Success + restarting: + type: integer + title: Restarting + failed: + type: integer + title: Failed + up_for_retry: + type: integer + title: Up For Retry + up_for_reschedule: + type: integer + title: Up For Reschedule + upstream_failed: + type: integer + title: Upstream Failed + skipped: + type: integer + title: Skipped + deferred: + type: integer + title: Deferred + type: object + required: + - no_status + - removed + - scheduled + - queued + - running + - success + - restarting + - failed + - up_for_retry + - up_for_reschedule + - upstream_failed + - skipped + - deferred + title: TaskInstantState + description: TaskInstance serializer for responses. ValidationError: properties: loc: diff --git a/airflow/api_fastapi/parameters.py b/airflow/api_fastapi/parameters.py index 504014602f3b5..59d61ad68600d 100644 --- a/airflow/api_fastapi/parameters.py +++ b/airflow/api_fastapi/parameters.py @@ -18,14 +18,18 @@ from __future__ import annotations from abc import ABC, abstractmethod +from datetime import datetime from typing import TYPE_CHECKING, Any, Generic, List, TypeVar from fastapi import Depends, HTTPException, Query +from pendulum.parsing.exceptions import ParserError +from pydantic import AfterValidator from sqlalchemy import case, or_ from typing_extensions import Annotated, Self from airflow.models.dag import DagModel, DagTag from airflow.models.dagrun import DagRun +from airflow.utils import timezone from airflow.utils.state import DagRunState if TYPE_CHECKING: @@ -235,6 +239,24 @@ def depends(self, last_dag_run_state: DagRunState | None = None) -> _LastDagRunS return self.set_value(last_dag_run_state) +def _safe_parse_datetime(date_to_check: str) -> datetime: + """ + Parse datetime and raise error for invalid dates. + + :param date_to_check: the string value to be parsed + """ + if not date_to_check: + raise ValueError(f"{date_to_check} cannot be None.") + try: + return timezone.parse(date_to_check, strict=True) + except (TypeError, ParserError): + raise HTTPException( + 400, f"Invalid datetime: {date_to_check!r}. Please check the date parameter have this value." 
+ ) + + +# Common Safe DateTime +DateTimeQuery = Annotated[str, AfterValidator(_safe_parse_datetime)] # DAG QueryLimit = Annotated[_LimitFilter, Depends(_LimitFilter().depends)] QueryOffset = Annotated[_OffsetFilter, Depends(_OffsetFilter().depends)] diff --git a/airflow/api_fastapi/serializers/dashboard.py b/airflow/api_fastapi/serializers/dashboard.py new file mode 100644 index 0000000000000..f5a38fa22ea87 --- /dev/null +++ b/airflow/api_fastapi/serializers/dashboard.py @@ -0,0 +1,63 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from pydantic import BaseModel + + +class DAGRunTypes(BaseModel): + """DAG Run Types for responses.""" + + backfill: int + scheduled: int + manual: int + dataset_triggered: int + + +class DAGRunStates(BaseModel): + """DAG Run States for responses.""" + + queued: int + running: int + success: int + failed: int + + +class TaskInstantState(BaseModel): + """TaskInstance serializer for responses.""" + + no_status: int + removed: int + scheduled: int + queued: int + running: int + success: int + restarting: int + failed: int + up_for_retry: int + up_for_reschedule: int + upstream_failed: int + skipped: int + deferred: int + + +class HistoricalMetricDataResponse(BaseModel): + """Historical Metric Data serializer for responses.""" + + dag_run_types: DAGRunTypes + dag_run_states: DAGRunStates + task_instance_states: TaskInstantState diff --git a/airflow/api_fastapi/views/ui/__init__.py b/airflow/api_fastapi/views/ui/__init__.py index 8495ac5e5e6a4..f01686cc99ee2 100644 --- a/airflow/api_fastapi/views/ui/__init__.py +++ b/airflow/api_fastapi/views/ui/__init__.py @@ -18,7 +18,9 @@ from airflow.api_fastapi.views.router import AirflowRouter from airflow.api_fastapi.views.ui.assets import assets_router +from airflow.api_fastapi.views.ui.dashboard import dashboard_router ui_router = AirflowRouter(prefix="/ui") ui_router.include_router(assets_router) +ui_router.include_router(dashboard_router) diff --git a/airflow/api_fastapi/views/ui/dashboard.py b/airflow/api_fastapi/views/ui/dashboard.py new file mode 100644 index 0000000000000..0d6b69a1ced5b --- /dev/null +++ b/airflow/api_fastapi/views/ui/dashboard.py @@ -0,0 +1,100 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from typing import TYPE_CHECKING + +from fastapi import Depends +from sqlalchemy import func, select +from sqlalchemy.orm import Session +from typing_extensions import Annotated + +from airflow.api_fastapi.openapi.exceptions import create_openapi_http_exception_doc +from airflow.api_fastapi.parameters import DateTimeQuery +from airflow.api_fastapi.serializers.dashboard import HistoricalMetricDataResponse +from airflow.models.dagrun import DagRun, DagRunType +from airflow.models.taskinstance import TaskInstance +from airflow.utils.state import DagRunState, TaskInstanceState + +if TYPE_CHECKING: + from sqlalchemy.orm import Session +from airflow.api_fastapi.db.common import get_session +from airflow.api_fastapi.views.router import AirflowRouter +from airflow.utils import timezone + +dashboard_router = AirflowRouter(tags=["Dashboard"]) + + +@dashboard_router.get( + "/dashboard/historical_metrics_data", + include_in_schema=False, + responses=create_openapi_http_exception_doc([400]), +) +async def historical_metrics( + start_date: DateTimeQuery, + end_date: DateTimeQuery, + session: Annotated[Session, Depends(get_session)], +) -> HistoricalMetricDataResponse: + """Return cluster activity historical metrics.""" + # DagRuns + dag_run_types = session.execute( + select(DagRun.run_type, func.count(DagRun.run_id)) + .where( + DagRun.start_date >= start_date, + func.coalesce(DagRun.end_date, timezone.utcnow()) <= end_date, + ) + .group_by(DagRun.run_type) + ).all() + + dag_run_states = session.execute( + select(DagRun.state, func.count(DagRun.run_id)) + .where( + DagRun.start_date >= start_date, + func.coalesce(DagRun.end_date, timezone.utcnow()) <= end_date, + ) + .group_by(DagRun.state) + ).all() + + # TaskInstances + task_instance_states = session.execute( + select(TaskInstance.state, func.count(TaskInstance.run_id)) + .join(TaskInstance.dag_run) + .where( + DagRun.start_date >= start_date, + func.coalesce(DagRun.end_date, timezone.utcnow()) <= end_date, + ) + .group_by(TaskInstance.state) + ).all() + + # Combining historical metrics response as dictionary + historical_metrics_response = { + "dag_run_types": { + **{dag_run_type.value: 0 for dag_run_type in DagRunType}, + **dict(dag_run_types), + }, + "dag_run_states": { + **{dag_run_state.value: 0 for dag_run_state in DagRunState}, + **dict(dag_run_states), + }, + "task_instance_states": { + "no_status": 0, + **{ti_state.value: 0 for ti_state in TaskInstanceState}, + **{ti_state or "no_status": sum_value for ti_state, sum_value in task_instance_states}, + }, + } + + return HistoricalMetricDataResponse.model_validate(historical_metrics_response, from_attributes=True) diff --git a/airflow/ui/openapi-gen/queries/common.ts b/airflow/ui/openapi-gen/queries/common.ts index afd7afd5bcc06..fbbbac5d60625 100644 --- a/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow/ui/openapi-gen/queries/common.ts @@ -5,6 +5,7 @@ import { AssetService, ConnectionService, DagService, + DashboardService, VariableService, } from "../requests/services.gen"; import { DagRunState } from "../requests/types.gen"; @@ -25,6 +26,28 
@@ export const UseAssetServiceNextRunAssetsKeyFn = ( }, queryKey?: Array, ) => [useAssetServiceNextRunAssetsKey, ...(queryKey ?? [{ dagId }])]; +export type DashboardServiceHistoricalMetricsDefaultResponse = Awaited< + ReturnType +>; +export type DashboardServiceHistoricalMetricsQueryResult< + TData = DashboardServiceHistoricalMetricsDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useDashboardServiceHistoricalMetricsKey = + "DashboardServiceHistoricalMetrics"; +export const UseDashboardServiceHistoricalMetricsKeyFn = ( + { + endDate, + startDate, + }: { + endDate: string; + startDate: string; + }, + queryKey?: Array, +) => [ + useDashboardServiceHistoricalMetricsKey, + ...(queryKey ?? [{ endDate, startDate }]), +]; export type DagServiceGetDagsDefaultResponse = Awaited< ReturnType >; diff --git a/airflow/ui/openapi-gen/queries/prefetch.ts b/airflow/ui/openapi-gen/queries/prefetch.ts index cbb43cca3abb1..7c8555b29d264 100644 --- a/airflow/ui/openapi-gen/queries/prefetch.ts +++ b/airflow/ui/openapi-gen/queries/prefetch.ts @@ -5,6 +5,7 @@ import { AssetService, ConnectionService, DagService, + DashboardService, } from "../requests/services.gen"; import { DagRunState } from "../requests/types.gen"; import * as Common from "./common"; @@ -28,6 +29,32 @@ export const prefetchUseAssetServiceNextRunAssets = ( queryKey: Common.UseAssetServiceNextRunAssetsKeyFn({ dagId }), queryFn: () => AssetService.nextRunAssets({ dagId }), }); +/** + * Historical Metrics + * Return cluster activity historical metrics. + * @param data The data for the request. + * @param data.startDate + * @param data.endDate + * @returns HistoricalMetricDataResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDashboardServiceHistoricalMetrics = ( + queryClient: QueryClient, + { + endDate, + startDate, + }: { + endDate: string; + startDate: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDashboardServiceHistoricalMetricsKeyFn({ + endDate, + startDate, + }), + queryFn: () => DashboardService.historicalMetrics({ endDate, startDate }), + }); /** * Get Dags * Get all DAGs. diff --git a/airflow/ui/openapi-gen/queries/queries.ts b/airflow/ui/openapi-gen/queries/queries.ts index 22d58eadda0cb..9137ea4ed01bc 100644 --- a/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow/ui/openapi-gen/queries/queries.ts @@ -1,15 +1,16 @@ // generated with @7nohe/openapi-react-query-codegen@1.6.0 import { - useMutation, UseMutationOptions, - useQuery, UseQueryOptions, + useMutation, + useQuery, } from "@tanstack/react-query"; import { AssetService, ConnectionService, DagService, + DashboardService, VariableService, } from "../requests/services.gen"; import { DAGPatchBody, DagRunState } from "../requests/types.gen"; @@ -40,6 +41,39 @@ export const useAssetServiceNextRunAssets = < queryFn: () => AssetService.nextRunAssets({ dagId }) as TData, ...options, }); +/** + * Historical Metrics + * Return cluster activity historical metrics. + * @param data The data for the request. 
+ * @param data.startDate + * @param data.endDate + * @returns HistoricalMetricDataResponse Successful Response + * @throws ApiError + */ +export const useDashboardServiceHistoricalMetrics = < + TData = Common.DashboardServiceHistoricalMetricsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + endDate, + startDate, + }: { + endDate: string; + startDate: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDashboardServiceHistoricalMetricsKeyFn( + { endDate, startDate }, + queryKey, + ), + queryFn: () => + DashboardService.historicalMetrics({ endDate, startDate }) as TData, + ...options, + }); /** * Get Dags * Get all DAGs. diff --git a/airflow/ui/openapi-gen/queries/suspense.ts b/airflow/ui/openapi-gen/queries/suspense.ts index 04d7eb94b3208..70be4beb0d479 100644 --- a/airflow/ui/openapi-gen/queries/suspense.ts +++ b/airflow/ui/openapi-gen/queries/suspense.ts @@ -5,6 +5,7 @@ import { AssetService, ConnectionService, DagService, + DashboardService, } from "../requests/services.gen"; import { DagRunState } from "../requests/types.gen"; import * as Common from "./common"; @@ -34,6 +35,39 @@ export const useAssetServiceNextRunAssetsSuspense = < queryFn: () => AssetService.nextRunAssets({ dagId }) as TData, ...options, }); +/** + * Historical Metrics + * Return cluster activity historical metrics. + * @param data The data for the request. + * @param data.startDate + * @param data.endDate + * @returns HistoricalMetricDataResponse Successful Response + * @throws ApiError + */ +export const useDashboardServiceHistoricalMetricsSuspense = < + TData = Common.DashboardServiceHistoricalMetricsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + endDate, + startDate, + }: { + endDate: string; + startDate: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDashboardServiceHistoricalMetricsKeyFn( + { endDate, startDate }, + queryKey, + ), + queryFn: () => + DashboardService.historicalMetrics({ endDate, startDate }) as TData, + ...options, + }); /** * Get Dags * Get all DAGs. 
diff --git a/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow/ui/openapi-gen/requests/schemas.gen.ts index 910354423bac3..26fe1180ae7b5 100644 --- a/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -784,6 +784,56 @@ export const $DAGResponse = { description: "DAG serializer for responses.", } as const; +export const $DAGRunStates = { + properties: { + queued: { + type: "integer", + title: "Queued", + }, + running: { + type: "integer", + title: "Running", + }, + success: { + type: "integer", + title: "Success", + }, + failed: { + type: "integer", + title: "Failed", + }, + }, + type: "object", + required: ["queued", "running", "success", "failed"], + title: "DAGRunStates", + description: "DAG Run States for responses.", +} as const; + +export const $DAGRunTypes = { + properties: { + backfill: { + type: "integer", + title: "Backfill", + }, + scheduled: { + type: "integer", + title: "Scheduled", + }, + manual: { + type: "integer", + title: "Manual", + }, + dataset_triggered: { + type: "integer", + title: "Dataset Triggered", + }, + }, + type: "object", + required: ["backfill", "scheduled", "manual", "dataset_triggered"], + title: "DAGRunTypes", + description: "DAG Run Types for responses.", +} as const; + export const $DagRunState = { type: "string", enum: ["queued", "running", "success", "failed"], @@ -847,6 +897,99 @@ export const $HTTPValidationError = { title: "HTTPValidationError", } as const; +export const $HistoricalMetricDataResponse = { + properties: { + dag_run_types: { + $ref: "#/components/schemas/DAGRunTypes", + }, + dag_run_states: { + $ref: "#/components/schemas/DAGRunStates", + }, + task_instance_states: { + $ref: "#/components/schemas/TaskInstantState", + }, + }, + type: "object", + required: ["dag_run_types", "dag_run_states", "task_instance_states"], + title: "HistoricalMetricDataResponse", + description: "Historical Metric Data serializer for responses.", +} as const; + +export const $TaskInstantState = { + properties: { + no_status: { + type: "integer", + title: "No Status", + }, + removed: { + type: "integer", + title: "Removed", + }, + scheduled: { + type: "integer", + title: "Scheduled", + }, + queued: { + type: "integer", + title: "Queued", + }, + running: { + type: "integer", + title: "Running", + }, + success: { + type: "integer", + title: "Success", + }, + restarting: { + type: "integer", + title: "Restarting", + }, + failed: { + type: "integer", + title: "Failed", + }, + up_for_retry: { + type: "integer", + title: "Up For Retry", + }, + up_for_reschedule: { + type: "integer", + title: "Up For Reschedule", + }, + upstream_failed: { + type: "integer", + title: "Upstream Failed", + }, + skipped: { + type: "integer", + title: "Skipped", + }, + deferred: { + type: "integer", + title: "Deferred", + }, + }, + type: "object", + required: [ + "no_status", + "removed", + "scheduled", + "queued", + "running", + "success", + "restarting", + "failed", + "up_for_retry", + "up_for_reschedule", + "upstream_failed", + "skipped", + "deferred", + ], + title: "TaskInstantState", + description: "TaskInstance serializer for responses.", +} as const; + export const $ValidationError = { properties: { loc: { diff --git a/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow/ui/openapi-gen/requests/services.gen.ts index 72fd2f68f1000..268f636404b16 100644 --- a/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow/ui/openapi-gen/requests/services.gen.ts @@ -5,6 +5,8 @@ import { request as __request } from 
"./core/request"; import type { NextRunAssetsData, NextRunAssetsResponse, + HistoricalMetricsData, + HistoricalMetricsResponse, GetDagsData, GetDagsResponse, PatchDagsData, @@ -45,6 +47,34 @@ export class AssetService { } } +export class DashboardService { + /** + * Historical Metrics + * Return cluster activity historical metrics. + * @param data The data for the request. + * @param data.startDate + * @param data.endDate + * @returns HistoricalMetricDataResponse Successful Response + * @throws ApiError + */ + public static historicalMetrics( + data: HistoricalMetricsData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/ui/dashboard/historical_metrics_data", + query: { + start_date: data.startDate, + end_date: data.endDate, + }, + errors: { + 400: "Bad Request", + 422: "Validation Error", + }, + }); + } +} + export class DagService { /** * Get Dags diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index d07d980397c83..268960a596870 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -120,6 +120,26 @@ export type DAGResponse = { readonly file_token: string; }; +/** + * DAG Run States for responses. + */ +export type DAGRunStates = { + queued: number; + running: number; + success: number; + failed: number; +}; + +/** + * DAG Run Types for responses. + */ +export type DAGRunTypes = { + backfill: number; + scheduled: number; + manual: number; + dataset_triggered: number; +}; + /** * All possible states that a DagRun can be in. * @@ -152,6 +172,34 @@ export type HTTPValidationError = { detail?: Array; }; +/** + * Historical Metric Data serializer for responses. + */ +export type HistoricalMetricDataResponse = { + dag_run_types: DAGRunTypes; + dag_run_states: DAGRunStates; + task_instance_states: TaskInstantState; +}; + +/** + * TaskInstance serializer for responses. 
+ */ +export type TaskInstantState = { + no_status: number; + removed: number; + scheduled: number; + queued: number; + running: number; + success: number; + restarting: number; + failed: number; + up_for_retry: number; + up_for_reschedule: number; + upstream_failed: number; + skipped: number; + deferred: number; +}; + export type ValidationError = { loc: Array; msg: string; @@ -166,6 +214,13 @@ export type NextRunAssetsResponse = { [key: string]: unknown; }; +export type HistoricalMetricsData = { + endDate: string; + startDate: string; +}; + +export type HistoricalMetricsResponse = HistoricalMetricDataResponse; + export type GetDagsData = { dagDisplayNamePattern?: string | null; dagIdPattern?: string | null; @@ -246,6 +301,25 @@ export type $OpenApiTs = { }; }; }; + "/ui/dashboard/historical_metrics_data": { + get: { + req: HistoricalMetricsData; + res: { + /** + * Successful Response + */ + 200: HistoricalMetricDataResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; "/public/dags/": { get: { req: GetDagsData; diff --git a/airflow/www/views.py b/airflow/www/views.py index 7782da955c92e..8dba4c4fcc0d8 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -3348,6 +3348,7 @@ def grid_data(self): @expose("/object/historical_metrics_data") @auth.has_access_view(AccessView.CLUSTER_ACTIVITY) + @mark_fastapi_migration_done def historical_metrics_data(self): """Return cluster activity historical metrics.""" start_date = _safe_parse_datetime(request.args.get("start_date")) diff --git a/tests/api_fastapi/views/ui/test_dashboard.py b/tests/api_fastapi/views/ui/test_dashboard.py new file mode 100644 index 0000000000000..970b79ad3568c --- /dev/null +++ b/tests/api_fastapi/views/ui/test_dashboard.py @@ -0,0 +1,151 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from datetime import timedelta + +import pendulum +import pytest + +from airflow.models import DagBag +from airflow.operators.empty import EmptyOperator +from airflow.utils.state import DagRunState, TaskInstanceState +from airflow.utils.types import DagRunType +from tests.test_utils.db import clear_db_runs + +pytestmark = pytest.mark.db_test + + +@pytest.fixture(autouse=True, scope="module") +def examples_dag_bag(): + # Speed up: We don't want example dags for this module + + return DagBag(include_examples=False, read_dags_from_db=True) + + +@pytest.fixture(autouse=True) +def clean(): + clear_db_runs() + yield + clear_db_runs() + + +# freeze time fixture so that it is applied before `make_dag_runs` is! 
+@pytest.fixture +def freeze_time_for_dagruns(time_machine): + time_machine.move_to("2023-05-02T00:00:00+00:00", tick=False) + + +@pytest.fixture +def make_dag_runs(dag_maker, session, time_machine): + with dag_maker( + dag_id="test_dag_id", + serialized=True, + session=session, + start_date=pendulum.DateTime(2023, 2, 1, 0, 0, 0, tzinfo=pendulum.UTC), + ): + EmptyOperator(task_id="task_1") >> EmptyOperator(task_id="task_2") + + date = dag_maker.dag.start_date + + run1 = dag_maker.create_dagrun( + run_id="run_1", + state=DagRunState.SUCCESS, + run_type=DagRunType.SCHEDULED, + execution_date=date, + start_date=date, + ) + + run2 = dag_maker.create_dagrun( + run_id="run_2", + state=DagRunState.FAILED, + run_type=DagRunType.DATASET_TRIGGERED, + execution_date=date + timedelta(days=1), + start_date=date + timedelta(days=1), + ) + + run3 = dag_maker.create_dagrun( + run_id="run_3", + state=DagRunState.RUNNING, + run_type=DagRunType.SCHEDULED, + execution_date=pendulum.DateTime(2023, 2, 3, 0, 0, 0, tzinfo=pendulum.UTC), + start_date=pendulum.DateTime(2023, 2, 3, 0, 0, 0, tzinfo=pendulum.UTC), + ) + run3.end_date = None + + for ti in run1.task_instances: + ti.state = TaskInstanceState.SUCCESS + + for ti in run2.task_instances: + ti.state = TaskInstanceState.FAILED + + dag_maker.dagbag.sync_to_db() + time_machine.move_to("2023-07-02T00:00:00+00:00", tick=False) + + +class TestHistoricalMetricsDataEndpoint: + @pytest.mark.usefixtures("freeze_time_for_dagruns", "make_dag_runs") + def test_historical_metrics_data(self, test_client, time_machine): + params = {"start_date": "2023-01-01T00:00", "end_date": "2023-08-02T00:00"} + response = test_client.get("/ui/dashboard/historical_metrics_data", params=params) + + assert response.status_code == 200 + assert response.json() == { + "dag_run_states": {"failed": 1, "queued": 0, "running": 1, "success": 1}, + "dag_run_types": {"backfill": 0, "dataset_triggered": 1, "manual": 0, "scheduled": 2}, + "task_instance_states": { + "deferred": 0, + "failed": 2, + "no_status": 2, + "queued": 0, + "removed": 0, + "restarting": 0, + "running": 0, + "scheduled": 0, + "skipped": 0, + "success": 2, + "up_for_reschedule": 0, + "up_for_retry": 0, + "upstream_failed": 0, + }, + } + + @pytest.mark.usefixtures("freeze_time_for_dagruns", "make_dag_runs") + def test_historical_metrics_data_date_filters(self, test_client): + params = {"start_date": "2023-02-02T00:00", "end_date": "2023-06-02T00:00"} + response = test_client.get("/ui/dashboard/historical_metrics_data", params=params) + assert response.status_code == 200 + assert response.json() == { + "dag_run_states": {"failed": 1, "queued": 0, "running": 0, "success": 0}, + "dag_run_types": {"backfill": 0, "dataset_triggered": 1, "manual": 0, "scheduled": 0}, + "task_instance_states": { + "deferred": 0, + "failed": 2, + "no_status": 0, + "queued": 0, + "removed": 0, + "restarting": 0, + "running": 0, + "scheduled": 0, + "skipped": 0, + "success": 0, + "up_for_reschedule": 0, + "up_for_retry": 0, + "upstream_failed": 0, + }, + } From 9c4b81d71f3caee3a3b1c6d9c626ee5d16f0db7c Mon Sep 17 00:00:00 2001 From: Karthikeyan Singaravelan Date: Wed, 9 Oct 2024 13:45:14 +0530 Subject: [PATCH 034/125] Add support to filter by last dagrun state in UI. (#42779) * Add support to filter by last dagrun state in UI. * Fix lint errors. * Fix lint errors. * Fix PR comments over null checks and query parameter to be last_dag_run_state. 
--- airflow/ui/src/pages/DagsList/DagsFilters.tsx | 55 +++++++++++++++++-- airflow/ui/src/pages/DagsList/DagsList.tsx | 5 +- 2 files changed, 55 insertions(+), 5 deletions(-) diff --git a/airflow/ui/src/pages/DagsList/DagsFilters.tsx b/airflow/ui/src/pages/DagsList/DagsFilters.tsx index cb2be8322e500..6316816f48f95 100644 --- a/airflow/ui/src/pages/DagsList/DagsFilters.tsx +++ b/airflow/ui/src/pages/DagsList/DagsFilters.tsx @@ -25,11 +25,17 @@ import { useTableURLState } from "src/components/DataTable/useTableUrlState"; import { QuickFilterButton } from "src/components/QuickFilterButton"; const PAUSED_PARAM = "paused"; +const STATE_PARAM = "last_dag_run_state"; export const DagsFilters = () => { const [searchParams, setSearchParams] = useSearchParams(); const showPaused = searchParams.get(PAUSED_PARAM); + const state = searchParams.get(STATE_PARAM); + const isAll = state === null; + const isRunning = state === "running"; + const isFailed = state === "failed"; + const isSuccess = state === "success"; const { setTableURLState, tableURLState } = useTableURLState(); const { pagination, sorting } = tableURLState; @@ -51,6 +57,23 @@ export const DagsFilters = () => { [pagination, searchParams, setSearchParams, setTableURLState, sorting], ); + const handleStateChange: React.MouseEventHandler = + useCallback( + ({ currentTarget: { value } }) => { + if (value === "all") { + searchParams.delete(STATE_PARAM); + } else { + searchParams.set(STATE_PARAM, value); + } + setSearchParams(searchParams); + setTableURLState({ + pagination: { ...pagination, pageIndex: 0 }, + sorting, + }); + }, + [pagination, searchParams, setSearchParams, setTableURLState, sorting], + ); + return ( @@ -59,10 +82,34 @@ export const DagsFilters = () => { State: - All - Failed - Running - Successful + + All + + + Failed + + + Running + + + Successful + diff --git a/airflow/ui/src/pages/DagsList/DagsList.tsx b/airflow/ui/src/pages/DagsList/DagsList.tsx index 7b87e8d253fb2..178663baf7478 100644 --- a/airflow/ui/src/pages/DagsList/DagsList.tsx +++ b/airflow/ui/src/pages/DagsList/DagsList.tsx @@ -29,7 +29,7 @@ import { type ChangeEventHandler, useCallback, useState } from "react"; import { useSearchParams } from "react-router-dom"; import { useDagServiceGetDags } from "openapi/queries"; -import type { DAGResponse } from "openapi/requests/types.gen"; +import type { DAGResponse, DagRunState } from "openapi/requests/types.gen"; import { DataTable } from "src/components/DataTable"; import { ToggleTableDisplay } from "src/components/DataTable/ToggleTableDisplay"; import type { CardDef } from "src/components/DataTable/types"; @@ -97,12 +97,14 @@ const cardDef: CardDef = { }; const PAUSED_PARAM = "paused"; +const STATE_PARAM = "last_dag_run_state"; export const DagsList = () => { const [searchParams] = useSearchParams(); const [display, setDisplay] = useState<"card" | "table">("card"); const showPaused = searchParams.get(PAUSED_PARAM); + const lastDagRunState = searchParams.get(STATE_PARAM) as DagRunState; const { setTableURLState, tableURLState } = useTableURLState(); const { pagination, sorting } = tableURLState; @@ -112,6 +114,7 @@ export const DagsList = () => { const orderBy = sort ? `${sort.desc ? 
"-" : ""}${sort.id}` : undefined; const { data, isFetching, isLoading } = useDagServiceGetDags({ + lastDagRunState, limit: pagination.pageSize, offset: pagination.pageIndex * pagination.pageSize, onlyActive: true, From bf46f68a7c1dedb6b5abc8dbb410af7e8cd88d55 Mon Sep 17 00:00:00 2001 From: Wei Lee Date: Wed, 9 Oct 2024 18:52:10 +0800 Subject: [PATCH 035/125] feat(datasets): make strict_dataset_uri_validation default to True (#41814) * feat(datasets): make strict_dataset_uri_validation default to True * docs(assets): add uri scheme restrict instruction for AIP-60 * feat(assets): add an error logging to URI that is not AIP-60 compliant * Better test string --------- Co-authored-by: Tzu-ping Chung --- airflow/assets/__init__.py | 20 ++-- airflow/config_templates/config.yml | 4 +- .../authoring-and-scheduling/assets.rst | 109 ++++++++++-------- newsfragments/41814.significant.rst | 1 + tests/assets/tests_asset.py | 11 -- tests/io/test_path.py | 2 +- 6 files changed, 77 insertions(+), 70 deletions(-) create mode 100644 newsfragments/41814.significant.rst diff --git a/airflow/assets/__init__.py b/airflow/assets/__init__.py index deb9aa593ded5..e11b9c49df36d 100644 --- a/airflow/assets/__init__.py +++ b/airflow/assets/__init__.py @@ -17,6 +17,7 @@ from __future__ import annotations +import logging import os import urllib.parse import warnings @@ -41,6 +42,9 @@ __all__ = ["Asset", "AssetAll", "AssetAny"] +log = logging.getLogger(__name__) + + def normalize_noop(parts: SplitResult) -> SplitResult: """ Place-hold a :class:`~urllib.parse.SplitResult`` normalizer. @@ -109,14 +113,16 @@ def _sanitize_uri(uri: str) -> str: try: parsed = normalizer(parsed) except ValueError as exception: - if conf.getboolean("core", "strict_asset_uri_validation", fallback=False): + if conf.getboolean("core", "strict_asset_uri_validation", fallback=True): + log.error( + ( + "The Asset URI %s is not AIP-60 compliant: %s. " + "Please check https://airflow.apache.org/docs/apache-airflow/stable/authoring-and-scheduling/assets.html" + ), + uri, + exception, + ) raise - warnings.warn( - f"The Asset URI {uri} is not AIP-60 compliant: {exception}. " - f"In Airflow 3, this will raise an exception.", - UserWarning, - stacklevel=3, - ) return urllib.parse.urlunsplit(parsed) diff --git a/airflow/config_templates/config.yml b/airflow/config_templates/config.yml index b96c07f237376..0be77a3b6829a 100644 --- a/airflow/config_templates/config.yml +++ b/airflow/config_templates/config.yml @@ -486,9 +486,7 @@ core: strict_asset_uri_validation: description: | Asset URI validation should raise an exception if it is not compliant with AIP-60. - By default this configuration is false, meaning that Airflow 2.x only warns the user. - In Airflow 3, this configuration will be enabled by default. - default: "False" + default: "True" example: ~ version_added: 2.9.2 type: boolean diff --git a/docs/apache-airflow/authoring-and-scheduling/assets.rst b/docs/apache-airflow/authoring-and-scheduling/assets.rst index d37143367fabe..7940a9051679c 100644 --- a/docs/apache-airflow/authoring-and-scheduling/assets.rst +++ b/docs/apache-airflow/authoring-and-scheduling/assets.rst @@ -23,23 +23,23 @@ Data-aware scheduling Quickstart ---------- -In addition to scheduling DAGs based on time, you can also schedule DAGs to run based on when a task updates a asset. +In addition to scheduling DAGs based on time, you can also schedule DAGs to run based on when a task updates an asset. .. 
code-block:: python - from airflow.assets import asset + from airflow.assets import Asset with DAG(...): MyOperator( # this task updates example.csv - outlets=[asset("s3://asset-bucket/example.csv")], + outlets=[Asset("s3://asset-bucket/example.csv")], ..., ) with DAG( # this DAG should be run when example.csv is updated (by dag1) - schedule=[asset("s3://asset-bucket/example.csv")], + schedule=[Asset("s3://asset-bucket/example.csv")], ..., ): ... @@ -48,7 +48,7 @@ In addition to scheduling DAGs based on time, you can also schedule DAGs to run .. image:: /img/asset-scheduled-dags.png -What is a "asset"? +What is an "Asset"? -------------------- An Airflow asset is a logical grouping of data. Upstream producer tasks can update assets, and asset updates contribute to scheduling downstream consumer DAGs. @@ -57,13 +57,19 @@ An Airflow asset is a logical grouping of data. Upstream producer tasks can upda .. code-block:: python - from airflow.assets import asset + from airflow.assets import Asset - example_asset = asset("s3://asset-bucket/example.csv") + example_asset = Asset("s3://asset-bucket/example.csv") Airflow makes no assumptions about the content or location of the data represented by the URI, and treats the URI like a string. This means that Airflow treats any regular expressions, like ``input_\d+.csv``, or file glob patterns, such as ``input_2022*.csv``, as an attempt to create multiple assets from one declaration, and they will not work. -You must create assets with a valid URI. Airflow core and providers define various URI schemes that you can use, such as ``file`` (core), ``postgres`` (by the Postgres provider), and ``s3`` (by the Amazon provider). Third-party providers and plugins might also provide their own schemes. These pre-defined schemes have individual semantics that are expected to be followed. +You must create assets with a valid URI. Airflow core and providers define various URI schemes that you can use, such as ``file`` (core), ``postgres`` (by the Postgres provider), and ``s3`` (by the Amazon provider). Third-party providers and plugins might also provide their own schemes. These pre-defined schemes have individual semantics that are expected to be followed. You can use the optional ``name`` argument to provide a more human-readable identifier for the asset. + +.. code-block:: python + + from airflow.assets import Asset + + example_asset = Asset(uri="s3://asset-bucket/example.csv", name="bucket-1") What is a valid URI? -------------------- @@ -72,6 +78,13 @@ Technically, the URI must conform to the valid character set in RFC 3986, which The URI is also case sensitive, so ``s3://example/asset`` and ``s3://Example/asset`` are considered different. Note that the *host* part of the URI is also case sensitive, which differs from RFC 3986. +For pre-defined schemes (e.g., ``file``, ``postgres``, and ``s3``), you must provide a meaningful URI. If you can't provide one, use another scheme altogether that doesn't have these semantic restrictions. Airflow never requires semantics for user-defined URI schemes (those prefixed with ``x-``), so they can be a good alternative. If you have a URI that can only be obtained later (e.g., during task execution), consider using ``AssetAlias`` instead and updating the URI later. + +.. code-block:: python + + # invalid asset: + must_contain_bucket_name = Asset("s3://") + Do not use the ``airflow`` scheme, which is reserved for Airflow's internals.
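With strict URI validation enabled by default (see the ``strict_asset_uri_validation`` change above), a non-compliant URI for a pre-defined scheme now raises ``ValueError`` at definition time instead of merely warning. A minimal sketch, assuming the provider that owns the scheme is installed so its normalizer applies (the exact message comes from that normalizer):

.. code-block:: python

    from airflow.assets import Asset

    try:
        # "s3://" lacks a bucket, so the scheme's normalizer rejects it.
        broken = Asset("s3://")
    except ValueError as err:
        print(f"Not AIP-60 compliant: {err}")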
Airflow always prefers using lowercase schemes, and case sensitivity is needed in the host part of the URI to correctly distinguish between resources. @@ -79,65 +92,65 @@ Airflow always prefers using lower cases in schemes .. code-block:: python # invalid assets: - reserved = asset("airflow://example_asset") - not_ascii = asset("èxample_datašet") + reserved = Asset("airflow://example_asset") + not_ascii = Asset("èxample_datašet") If you want to define assets with a scheme that doesn't include additional semantic constraints, use a scheme with the prefix ``x-``. Airflow skips any semantic validation on URIs with these schemes. .. code-block:: python # valid asset, treated as a plain string - my_ds = asset("x-my-thing://foobarbaz") + my_ds = Asset("x-my-thing://foobarbaz") The identifier does not have to be absolute; it can be a scheme-less, relative URI, or even just a simple path or string: .. code-block:: python # valid assets: - schemeless = asset("//example/asset") - csv_file = asset("example_asset") + schemeless = Asset("//example/asset") + csv_file = Asset("example_asset") Non-absolute identifiers are considered plain strings that do not carry any semantic meanings to Airflow. Extra information on asset ---------------------------- -If needed, you can include an extra dictionary in a asset: +If needed, you can include an extra dictionary in an asset: .. code-block:: python - example_asset = asset( + example_asset = Asset( "s3://asset/example.csv", extra={"team": "trainees"}, ) -This can be used to supply custom description to the asset, such as who has ownership to the target file, or what the file is for. The extra information does not affect a asset's identity. This means a DAG will be triggered by a asset with an identical URI, even if the extra dict is different: +This can be used to supply a custom description of the asset, such as who owns the target file, or what the file is for. The extra information does not affect an asset's identity. This means a DAG will be triggered by an asset with an identical URI, even if the extra dict is different: .. code-block:: python with DAG( dag_id="consumer", - schedule=[asset("s3://asset/example.csv", extra={"different": "extras"})], + schedule=[Asset("s3://asset/example.csv", extra={"different": "extras"})], ): ... with DAG(dag_id="producer", ...): MyOperator( # triggers "consumer" with the given extra! - outlets=[asset("s3://asset/example.csv", extra={"team": "trainees"})], + outlets=[Asset("s3://asset/example.csv", extra={"team": "trainees"})], ..., ) -.. note:: **Security Note:** asset URI and extra fields are not encrypted, they are stored in cleartext in Airflow's metadata database. Do NOT store any sensitive values, especially credentials, in either asset URIs or extra key values! +.. note:: **Security Note:** Asset URI and extra fields are not encrypted; they are stored in cleartext in Airflow's metadata database. Do NOT store any sensitive values, especially credentials, in either asset URIs or extra key values! How to use assets in your DAGs -------------------------------- -You can use assets to specify data dependencies in your DAGs. The following example shows how after the ``producer`` task in the ``producer`` DAG successfully completes, Airflow schedules the ``consumer`` DAG. Airflow marks a asset as ``updated`` only if the task completes successfully. If the task fails or if it is skipped, no update occurs, and Airflow doesn't schedule the ``consumer`` DAG.
+You can use assets to specify data dependencies in your DAGs. The following example shows how after the ``producer`` task in the ``producer`` DAG successfully completes, Airflow schedules the ``consumer`` DAG. Airflow marks an asset as ``updated`` only if the task completes successfully. If the task fails or if it is skipped, no update occurs, and Airflow doesn't schedule the ``consumer`` DAG. .. code-block:: python - example_asset = asset("s3://asset/example.csv") + example_asset = Asset("s3://asset/example.csv") with DAG(dag_id="producer", ...): BashOperator(task_id="producer", outlets=[example_asset], ...) @@ -147,7 +160,7 @@ You can use assets to specify data dependencies in your DAGs. The following exam You can find a listing of the relationships between assets and DAGs in the -:ref:`assets View` +:ref:`Assets View` Multiple assets ----------------- @@ -228,17 +241,17 @@ Attaching extra information to an emitting asset event .. versionadded:: 2.10.0 -A task with a asset outlet can optionally attach extra information before it emits a asset event. This is different -from `Extra information on asset`_. Extra information on a asset statically describes the entity pointed to by the asset URI; extra information on the *asset event* instead should be used to annotate the triggering data change, such as how many rows in the database are changed by the update, or the date range covered by it. +A task with an asset outlet can optionally attach extra information before it emits an asset event. This is different +from `Extra information on asset`_. Extra information on an asset statically describes the entity pointed to by the asset URI; extra information on the *asset event* instead should be used to annotate the triggering data change, such as how many rows in the database are changed by the update, or the date range covered by it. The easiest way to attach extra information to the asset event is by ``yield``-ing a ``Metadata`` object from a task: .. code-block:: python - from airflow.assets import asset + from airflow.assets import Asset from airflow.assets.metadata import Metadata - example_s3_asset = asset("s3://asset/example.csv") + example_s3_asset = Asset("s3://asset/example.csv") @task(outlets=[example_s3_asset]) @@ -268,7 +281,7 @@ Fetching information from previously emitted asset events .. versionadded:: 2.10.0 -Events of a asset defined in a task's ``outlets``, as described in the previous section, can be read by a task that declares the same asset in its ``inlets``. A asset event entry contains ``extra`` (see previous section for details), ``timestamp`` indicating when the event was emitted from a task, and ``source_task_instance`` linking the event back to its source. +Events of an asset defined in a task's ``outlets``, as described in the previous section, can be read by a task that declares the same asset in its ``inlets``. An asset event entry contains ``extra`` (see previous section for details), ``timestamp`` indicating when the event was emitted from a task, and ``source_task_instance`` linking the event back to its source. Inlet asset events can be read with the ``inlet_events`` accessor in the execution context. Continuing from the ``write_to_s3`` task in the previous section: @@ -291,7 +304,7 @@ Example: ..
code-block:: python - example_snowflake_asset = asset("snowflake://my_db/my_schema/my_table") + example_snowflake_asset = Asset("snowflake://my_db/my_schema/my_table") with DAG(dag_id="load_snowflake_data", schedule="@hourly", ...): SQLExecuteQueryOperator( @@ -332,7 +345,7 @@ In this example, the DAG ``waiting_for_asset_1_and_2`` will be triggered when ta with DAG( dag_id="waiting_for_asset_1_and_2", - schedule=[asset("asset-1"), asset("asset-2")], + schedule=[Asset("asset-1"), Asset("asset-2")], ..., ): ... @@ -344,8 +357,8 @@ In this example, the DAG ``waiting_for_asset_1_and_2`` will be triggered when ta * Get queued asset events for a DAG: ``/dags/{dag_id}/assets/queuedEvent`` * Delete a queued asset event for a DAG: ``/assets/queuedEvent/{uri}`` * Delete queued asset events for a DAG: ``/dags/{dag_id}/assets/queuedEvent`` -* Get queued asset events for a asset: ``/dags/{dag_id}/assets/queuedEvent/{uri}`` -* Delete queued asset events for a asset: ``DELETE /dags/{dag_id}/assets/queuedEvent/{uri}`` +* Get queued asset events for an asset: ``/dags/{dag_id}/assets/queuedEvent/{uri}`` +* Delete queued asset events for an asset: ``DELETE /dags/{dag_id}/assets/queuedEvent/{uri}`` For how to use REST API and the parameters needed for these endpoints, please refer to :doc:`Airflow API `. @@ -373,8 +386,8 @@ To schedule a DAG to run only when two specific assets have both been updated, u .. code-block:: python - dag1_asset = asset("s3://dag1/output_1.txt") - dag2_asset = asset("s3://dag2/output_1.txt") + dag1_asset = Asset("s3://dag1/output_1.txt") + dag2_asset = Asset("s3://dag2/output_1.txt") with DAG( # Consume asset 1 and 2 with asset expressions @@ -402,7 +415,7 @@ For scenarios requiring more intricate conditions, such as triggering a DAG when .. code-block:: python - dag3_asset = asset("s3://dag3/output_3.txt") + dag3_asset = Asset("s3://dag3/output_3.txt") with DAG( # Consume asset 1 or both 2 and 3 with asset expressions @@ -421,9 +434,9 @@ How to use AssetAlias ``AssetAlias`` has one single argument ``name`` that uniquely identifies the asset. The task must first declare the alias as an outlet, and use ``outlet_events`` or yield ``Metadata`` to add events to it. -The following example creates a asset event against the S3 URI ``f"s3://bucket/my-task"`` with optional extra information ``extra``. If the asset does not exist, Airflow will dynamically create it and log a warning message. +The following example creates an asset event against the S3 URI ``f"s3://bucket/my-task"`` with optional extra information ``extra``. If the asset does not exist, Airflow will dynamically create it and log a warning message. -**Emit a asset event during task execution through outlet_events** +**Emit an asset event during task execution through outlet_events** .. code-block:: python @@ -432,10 +445,10 @@ The following example creates a asset event against the S3 URI ``f"s3://bucket/m @task(outlets=[AssetAlias("my-task-outputs")]) def my_task_with_outlet_events(*, outlet_events): - outlet_events["my-task-outputs"].add(asset("s3://bucket/my-task"), extra={"k": "v"}) + outlet_events["my-task-outputs"].add(Asset("s3://bucket/my-task"), extra={"k": "v"}) -**Emit a asset event during task execution through yielding Metadata** +**Emit an asset event during task execution through yielding Metadata** .. 
code-block:: python @@ -444,7 +457,7 @@ The following example creates a asset event against the S3 URI ``f"s3://bucket/m @task(outlets=[AssetAlias("my-task-outputs")]) def my_task_with_metadata(): - s3_asset = asset("s3://bucket/my-task") + s3_asset = Asset("s3://bucket/my-task") yield Metadata(s3_asset, extra={"k": "v"}, alias="my-task-outputs") Only one asset event is emitted for an added asset, even if it is added to the alias multiple times, or added to multiple aliases. However, if different ``extra`` values are passed, it can emit multiple asset events. In the following example, two asset events will be emitted. .. code-block:: python @@ -462,15 +475,15 @@ Only one asset event is emitted for an added asset, even if it is added to the a ] ) def my_task_with_outlet_events(*, outlet_events): - outlet_events["my-task-outputs-1"].add(asset("s3://bucket/my-task"), extra={"k": "v"}) + outlet_events["my-task-outputs-1"].add(Asset("s3://bucket/my-task"), extra={"k": "v"}) # This line won't emit an additional asset event as the asset and extra are the same as the previous line. - outlet_events["my-task-outputs-2"].add(asset("s3://bucket/my-task"), extra={"k": "v"}) + outlet_events["my-task-outputs-2"].add(Asset("s3://bucket/my-task"), extra={"k": "v"}) # This line will emit an additional asset event as the extra is different. - outlet_events["my-task-outputs-3"].add(asset("s3://bucket/my-task"), extra={"k2": "v2"}) + outlet_events["my-task-outputs-3"].add(Asset("s3://bucket/my-task"), extra={"k2": "v2"}) Scheduling based on asset aliases ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Since asset events added to an alias are just simple asset events, a downstream DAG depending on the actual asset can read asset events of it normally, without considering the associated aliases. A downstream DAG can also depend on an asset alias. The authoring syntax is referencing the ``AssetAlias`` by name, and the associated asset events are picked up for scheduling. Note that a DAG can be triggered by a task with ``outlets=AssetAlias("xxx")`` if and only if the alias is resolved into ``asset("s3://bucket/my-task")``. The DAG runs whenever a task with outlet ``AssetAlias("out")`` gets associated with at least one asset at runtime, regardless of the asset's identity. The downstream DAG is not triggered if no assets are associated to the alias for a particular given task run. This also means we can do conditional asset-triggering. +Since asset events added to an alias are just simple asset events, a downstream DAG depending on the actual asset can read its asset events normally, without considering the associated aliases. A downstream DAG can also depend on an asset alias. The authoring syntax references the ``AssetAlias`` by name, and the associated asset events are picked up for scheduling. Note that a DAG can be triggered by a task with ``outlets=AssetAlias("xxx")`` if and only if the alias is resolved into ``Asset("s3://bucket/my-task")``. The DAG runs whenever a task with outlet ``AssetAlias("out")`` gets associated with at least one asset at runtime, regardless of the asset's identity. The downstream DAG is not triggered if no assets are associated to the alias for a given task run. This also means we can do conditional asset-triggering; see the sketch below. The asset alias is resolved to the assets during DAG parsing. Thus, if the ``min_file_process_interval`` configuration is set to a high value, there is a possibility that the asset alias may not be resolved. To resolve this issue, you can trigger DAG parsing.
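To make the conditional triggering mentioned above concrete, here is a minimal sketch; ``some_condition`` is a hypothetical stand-in for any runtime check:

.. code-block:: python

    @task(outlets=[AssetAlias("out")])
    def conditional_producer(*, outlet_events):
        if some_condition():  # hypothetical runtime check
            # The alias resolves to an asset only when this branch runs, so
            # DAGs scheduled on AssetAlias("out") are triggered only then.
            outlet_events["out"].add(Asset("s3://bucket/my-task"))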
@@ -478,7 +491,7 @@ The asset alias is resolved to the assets during DAG parsing. Thus, if the "min_ with DAG(dag_id="asset-producer"): - @task(outlets=[asset("example-alias")]) + @task(outlets=[Asset("example-alias")]) def produce_asset_events(): pass @@ -487,17 +500,17 @@ The asset alias is resolved to the assets during DAG parsing. Thus, if the "min_ @task(outlets=[AssetAlias("example-alias")]) def produce_asset_events(*, outlet_events): - outlet_events["example-alias"].add(asset("s3://bucket/my-task")) + outlet_events["example-alias"].add(Asset("s3://bucket/my-task")) - with DAG(dag_id="asset-consumer", schedule=asset("s3://bucket/my-task")): + with DAG(dag_id="asset-consumer", schedule=Asset("s3://bucket/my-task")): ... with DAG(dag_id="asset-alias-consumer", schedule=AssetAlias("example-alias")): ... -In the example provided, once the DAG ``asset-alias-producer`` is executed, the asset alias ``AssetAlias("example-alias")`` will be resolved to ``asset("s3://bucket/my-task")``. However, the DAG ``asset-alias-consumer`` will have to wait for the next DAG re-parsing to update its schedule. To address this, Airflow will re-parse the DAGs relying on the asset alias ``AssetAlias("example-alias")`` when it's resolved into assets that these DAGs did not previously depend on. As a result, both the "asset-consumer" and "asset-alias-consumer" DAGs will be triggered after the execution of DAG ``asset-alias-producer``. +In the example provided, once the DAG ``asset-alias-producer`` is executed, the asset alias ``AssetAlias("example-alias")`` will be resolved to ``Asset("s3://bucket/my-task")``. However, the DAG ``asset-alias-consumer`` will have to wait for the next DAG re-parsing to update its schedule. To address this, Airflow will re-parse the DAGs relying on the asset alias ``AssetAlias("example-alias")`` when it's resolved into assets that these DAGs did not previously depend on. As a result, both the ``asset-consumer`` and ``asset-alias-consumer`` DAGs will be triggered after the execution of DAG ``asset-alias-producer``. Fetching information from previously emitted asset events through resolved asset aliases @@ -511,7 +524,7 @@ As mentioned in :ref:`Fetching information from previously emitted asset events< @task(outlets=[AssetAlias("example-alias")]) def produce_asset_events(*, outlet_events): - outlet_events["example-alias"].add(asset("s3://bucket/my-task"), extra={"row_count": 1}) + outlet_events["example-alias"].add(Asset("s3://bucket/my-task"), extra={"row_count": 1}) with DAG(dag_id="asset-alias-consumer", schedule=None): diff --git a/newsfragments/41814.significant.rst b/newsfragments/41814.significant.rst new file mode 100644 index 0000000000000..f3b6003a5c39f --- /dev/null +++ b/newsfragments/41814.significant.rst @@ -0,0 +1 @@ +Change the default value of ``strict_asset_uri_validation`` to ``True``.
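Deployments that still rely on the lenient behavior can opt out explicitly; a sketch, using the ``core`` option shown in ``config.yml`` above (with the flag off, non-compliant URIs are accepted without normalization):

.. code-block:: ini

    [core]
    # Accept non-AIP-60-compliant asset URIs instead of raising (the previous default).
    strict_asset_uri_validation = False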
diff --git a/tests/assets/tests_asset.py b/tests/assets/tests_asset.py index da6ef8ee79e39..afbb46827f3a9 100644 --- a/tests/assets/tests_asset.py +++ b/tests/assets/tests_asset.py @@ -39,7 +39,6 @@ from airflow.models.serialized_dag import SerializedDagModel from airflow.operators.empty import EmptyOperator from airflow.serialization.serialized_objects import BaseSerialization, SerializedDAG -from tests.test_utils.config import conf_vars @pytest.fixture @@ -492,16 +491,6 @@ def normalizer(uri): @patch("airflow.assets._get_uri_normalizer", _mock_get_uri_normalizer_raising_error) -@patch("airflow.assets.warnings.warn") -def test_sanitize_uri_raises_warning(mock_warn): - _sanitize_uri("postgres://localhost:5432/database.schema.table") - msg = mock_warn.call_args.args[0] - assert "The Asset URI postgres://localhost:5432/database.schema.table is not AIP-60 compliant" in msg - assert "In Airflow 3, this will raise an exception." in msg - - -@patch("airflow.assets._get_uri_normalizer", _mock_get_uri_normalizer_raising_error) -@conf_vars({("core", "strict_asset_uri_validation"): "True"}) def test_sanitize_uri_raises_exception(): with pytest.raises(ValueError) as e_info: _sanitize_uri("postgres://localhost:5432/database.schema.table") diff --git a/tests/io/test_path.py b/tests/io/test_path.py index 0e504b586b3d2..9d944c5f51c3c 100644 --- a/tests/io/test_path.py +++ b/tests/io/test_path.py @@ -406,7 +406,7 @@ def test_asset(self): attach("s3", fs=FakeRemoteFileSystem()) p = "s3" - f = "/tmp/foo" + f = "bucket/object" i = Asset(uri=f"{p}://{f}", extra={"foo": "bar"}) o = ObjectStoragePath(i) assert o.protocol == p From a5ffbbda17450a5c99037b292844087119b5676a Mon Sep 17 00:00:00 2001 From: GPK Date: Wed, 9 Oct 2024 12:40:39 +0100 Subject: [PATCH 036/125] Standard provider bash operator (#42252) --- .github/workflows/basic-tests.yml | 5 +++++ .pre-commit-config.yaml | 14 +++----------- airflow/decorators/bash.py | 2 +- airflow/example_dags/example_assets.py | 2 +- airflow/example_dags/example_bash_operator.py | 2 +- airflow/example_dags/example_complex.py | 2 +- .../example_dags/example_inlet_event_extra.py | 2 +- .../example_dags/example_outlet_event_extra.py | 2 +- .../example_passing_params_via_test_command.py | 2 +- airflow/example_dags/example_sensors.py | 4 ++-- airflow/example_dags/example_setup_teardown.py | 2 +- airflow/example_dags/example_task_group.py | 2 +- .../example_dags/example_trigger_target_dag.py | 2 +- airflow/example_dags/example_xcom.py | 2 +- airflow/example_dags/example_xcomargs.py | 2 +- airflow/example_dags/tutorial.py | 2 +- .../celery/executors/celery_executor_utils.py | 8 ++++++-- .../edge/example_dags/integration_test.py | 6 +++++- airflow/providers/openlineage/provider.yaml | 3 ++- .../{ => providers/standard}/operators/bash.py | 0 airflow/providers/standard/provider.yaml | 2 ++ .../{ => providers/standard}/sensors/bash.py | 0 dev/breeze/tests/test_selective_checks.py | 14 +++++++------- dev/perf/dags/elastic_dag.py | 2 +- dev/perf/dags/perf_dag_2.py | 2 +- .../chime_notifier_howto_guide.rst | 2 +- .../notifications/sns.rst | 2 +- .../notifications/sqs.rst | 2 +- .../apprise_notifier_howto_guide.rst | 2 +- .../jira-notifier-howto-guide.rst | 2 +- .../operators/cloud/mlengine.rst | 2 +- .../operators/cloud/pubsub.rst | 2 +- .../guides/developer.rst | 2 +- .../guides/user.rst | 4 ++-- .../pagerduty_notifier_howto_guide.rst | 2 +- .../slack_notifier_howto_guide.rst | 2 +- .../slackwebhook_notifier_howto_guide.rst | 2 +- .../smtp_notifier_howto_guide.rst | 2 +- 
.../administration-and-deployment/lineage.rst | 2 +- docs/apache-airflow/best-practices.rst | 2 +- docs/apache-airflow/core-concepts/dag-run.rst | 4 ++-- docs/apache-airflow/core-concepts/dags.rst | 2 +- .../apache-airflow/core-concepts/operators.rst | 2 +- docs/apache-airflow/core-concepts/tasks.rst | 2 +- docs/apache-airflow/howto/notifications.rst | 2 +- docs/apache-airflow/howto/operator/bash.rst | 4 ++-- docs/apache-airflow/index.rst | 2 +- .../apache-airflow/operators-and-hooks-ref.rst | 4 ++-- docs/apache-airflow/tutorial/taskflow.rst | 2 +- docs/exts/templates/openlineage.rst.jinja2 | 2 +- generated/provider_dependencies.json | 7 +++++-- kubernetes_tests/test_kubernetes_executor.py | 5 ++++- kubernetes_tests/test_other_executors.py | 5 ++++- tests/callbacks/test_callback_requests.py | 6 +++--- tests/cli/commands/test_task_command.py | 2 +- tests/core/test_core.py | 2 +- tests/dags/subdir2/test_dont_ignore_this.py | 2 +- tests/dags/test_assets.py | 2 +- .../test_backfill_with_upstream_failed_task.py | 2 +- tests/dags/test_default_impersonation.py | 2 +- tests/dags/test_example_bash_operator.py | 2 +- tests/dags/test_failing.py | 2 +- tests/dags/test_heartbeat_failed_fast.py | 2 +- tests/dags/test_impersonation.py | 2 +- tests/dags/test_miscellaneous.py | 2 +- tests/dags/test_multiple_dags.py | 2 +- tests/dags/test_no_impersonation.py | 2 +- tests/dags/test_on_failure_callback.py | 2 +- tests/dags/test_retry_handling_job.py | 2 +- tests/dags/test_sensor.py | 2 +- tests/decorators/test_setup_teardown.py | 2 +- .../executors/test_celery_executor.py | 2 +- tests/jobs/test_scheduler_job.py | 2 +- tests/listeners/test_listeners.py | 2 +- tests/models/test_dag.py | 2 +- tests/models/test_dagrun.py | 2 +- tests/models/test_renderedtifields.py | 2 +- tests/models/test_serialized_dag.py | 2 +- tests/models/test_taskinstance.py | 2 +- tests/models/test_xcom_arg.py | 2 +- .../executors/test_kubernetes_executor.py | 2 +- .../cncf/kubernetes/test_template_rendering.py | 2 +- .../openlineage/extractors/test_bash.py | 2 +- .../openlineage/extractors/test_python.py | 2 +- .../openlineage/plugins/test_adapter.py | 2 +- .../openlineage/plugins/test_facets.py | 2 +- .../openlineage/plugins/test_utils.py | 13 +++++++------ .../providers/openlineage/utils/test_utils.py | 18 +++++++++++------- .../standard}/operators/test_bash.py | 2 +- .../standard}/sensors/test_bash.py | 2 +- tests/sensors/test_external_task_sensor.py | 2 +- tests/serialization/test_dag_serialization.py | 8 ++++---- .../example_external_task_child_deferrable.py | 2 +- .../providers/amazon/aws/example_appflow.py | 2 +- .../providers/amazon/aws/example_http_to_s3.py | 2 +- tests/system/providers/amazon/aws/utils/k8s.py | 2 +- .../apache/hive/example_twitter_dag.py | 2 +- .../apache/iceberg/example_iceberg.py | 2 +- .../cncf/kubernetes/example_kubernetes.py | 2 +- .../kubernetes/example_kubernetes_async.py | 2 +- .../system/providers/docker/example_docker.py | 2 +- .../docker/example_docker_copy_data.py | 2 +- .../cloud/bigquery/example_bigquery_dataset.py | 2 +- .../cloud/bigquery/example_bigquery_queries.py | 2 +- .../bigquery/example_bigquery_queries_async.py | 2 +- .../bigquery/example_bigquery_to_mssql.py | 2 +- .../bigquery/example_bigquery_to_postgres.py | 2 +- .../cloud/cloud_build/example_cloud_build.py | 2 +- .../example_cloud_memorystore_memcached.py | 2 +- .../example_cloud_memorystore_redis.py | 2 +- .../cloud/gcs/example_gcs_copy_delete.py | 2 +- .../google/cloud/gcs/example_gcs_to_gcs.py | 2 +- 
.../google/cloud/gcs/example_mysql_to_gcs.py | 2 +- .../google/cloud/gcs/example_sftp_to_gcs.py | 2 +- .../google/cloud/gcs/example_sheets.py | 2 +- .../example_kubernetes_engine.py | 2 +- .../example_kubernetes_engine_async.py | 2 +- .../google/cloud/ml_engine/example_mlengine.py | 2 +- .../example_natural_language.py | 2 +- .../google/cloud/pubsub/example_pubsub.py | 2 +- .../sql_to_sheets/example_sql_to_sheets.py | 2 +- .../google/cloud/tasks/example_queue.py | 2 +- .../cloud/transfers/example_postgres_to_gcs.py | 2 +- .../cloud/translate/example_translate.py | 2 +- .../example_video_intelligence.py | 2 +- .../vision/example_vision_annotate_image.py | 2 +- .../datacatalog/example_datacatalog_entries.py | 2 +- .../example_datacatalog_search_catalog.py | 2 +- .../example_datacatalog_tag_templates.py | 2 +- .../datacatalog/example_datacatalog_tags.py | 2 +- .../opsgenie/example_opsgenie_notifier.py | 2 +- .../singularity/example_singularity.py | 2 +- tests/test_utils/compat.py | 13 ++++++++++--- tests/utils/test_dot_renderer.py | 2 +- tests/utils/test_task_group.py | 2 +- tests/www/views/test_views_rendered.py | 3 +-- tests/www/views/test_views_tasks.py | 3 +-- 137 files changed, 203 insertions(+), 176 deletions(-) rename airflow/{ => providers/standard}/operators/bash.py (100%) rename airflow/{ => providers/standard}/sensors/bash.py (100%) rename tests/{ => providers/standard}/operators/test_bash.py (99%) rename tests/{ => providers/standard}/sensors/test_bash.py (97%) diff --git a/.github/workflows/basic-tests.yml b/.github/workflows/basic-tests.yml index 2ccb239487679..49d6a7245bc88 100644 --- a/.github/workflows/basic-tests.yml +++ b/.github/workflows/basic-tests.yml @@ -200,6 +200,11 @@ jobs: breeze release-management prepare-provider-packages fab --package-format wheel --skip-tag-check - name: "Install Airflow with fab for webserver tests" run: pip install . dist/apache_airflow_providers_fab-*.whl + - name: "Prepare Standard provider packages: wheel" + run: > + breeze release-management prepare-provider-packages standard --package-format wheel --skip-tag-check + - name: "Install Airflow with standard provider for webserver tests" + run: pip install . 
dist/apache_airflow_providers_standard-*.whl - name: "Install Python client" run: pip install ./dist/apache_airflow_client-*.whl - name: "Initialize Airflow DB and start webserver" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a568c79c265aa..ce557dba431b6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -550,17 +550,9 @@ repos: - id: check-no-providers-in-core-examples language: pygrep name: No providers imports in core example DAGs - description: The core example DAGs have no dependencies other than core Airflow - entry: "^\\s*from airflow\\.providers.*" + description: The core example DAGs have no dependencies other than standard provider or core Airflow + entry: "^\\s*from airflow\\.providers.(?!standard.)" pass_filenames: true - exclude: > - (?x) - ^airflow/example_dags/example_branch_datetime_operator.py| - ^airflow/example_dags/example_branch_day_of_week_operator.py| - ^airflow/example_dags/example_sensors.py| - ^airflow/example_dags/example_sensors.py| - ^airflow/example_dags/example_sensors.py| - ^airflow/example_dags/example_time_delta_sensor_async.py files: ^airflow/example_dags/.*\.py$ - id: check-no-airflow-deprecation-in-providers language: pygrep @@ -717,7 +709,7 @@ repos: files: > (?x) ^airflow/providers/.*\.py$ - exclude: ^.*/.*_vendor/ + exclude: ^.*/.*_vendor/|airflow/providers/standard/operators/bash.py - id: check-get-lineage-collector-providers language: python name: Check providers import hook lineage code from compat diff --git a/airflow/decorators/bash.py b/airflow/decorators/bash.py index 39d3131d28c7b..44738492da098 100644 --- a/airflow/decorators/bash.py +++ b/airflow/decorators/bash.py @@ -21,7 +21,7 @@ from typing import Any, Callable, Collection, Mapping, Sequence from airflow.decorators.base import DecoratedOperator, TaskDecorator, task_decorator_factory -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.context import Context, context_merge from airflow.utils.operator_helpers import determine_kwargs from airflow.utils.types import NOTSET diff --git a/airflow/example_dags/example_assets.py b/airflow/example_dags/example_assets.py index 66369794ed999..451f17a3a3abd 100644 --- a/airflow/example_dags/example_assets.py +++ b/airflow/example_dags/example_assets.py @@ -56,7 +56,7 @@ from airflow.assets import Asset from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.timetables.assets import AssetOrTimeSchedule from airflow.timetables.trigger import CronTriggerTimetable diff --git a/airflow/example_dags/example_bash_operator.py b/airflow/example_dags/example_bash_operator.py index b08d31c9930c7..27702d4cb5f10 100644 --- a/airflow/example_dags/example_bash_operator.py +++ b/airflow/example_dags/example_bash_operator.py @@ -24,8 +24,8 @@ import pendulum from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.operators.empty import EmptyOperator +from airflow.providers.standard.operators.bash import BashOperator with DAG( dag_id="example_bash_operator", diff --git a/airflow/example_dags/example_complex.py b/airflow/example_dags/example_complex.py index e7eba78eae815..6d7d504f13d51 100644 --- a/airflow/example_dags/example_complex.py +++ b/airflow/example_dags/example_complex.py @@ -25,7 +25,7 @@ from airflow.models.baseoperator import chain from airflow.models.dag import DAG -from 
airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator with DAG( dag_id="example_complex", diff --git a/airflow/example_dags/example_inlet_event_extra.py b/airflow/example_dags/example_inlet_event_extra.py index 974534c295b79..9773df7a3f913 100644 --- a/airflow/example_dags/example_inlet_event_extra.py +++ b/airflow/example_dags/example_inlet_event_extra.py @@ -28,7 +28,7 @@ from airflow.assets import Asset from airflow.decorators import task from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator asset = Asset("s3://output/1.txt") diff --git a/airflow/example_dags/example_outlet_event_extra.py b/airflow/example_dags/example_outlet_event_extra.py index 893090460b538..0d097eab0ac27 100644 --- a/airflow/example_dags/example_outlet_event_extra.py +++ b/airflow/example_dags/example_outlet_event_extra.py @@ -29,7 +29,7 @@ from airflow.assets.metadata import Metadata from airflow.decorators import task from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator ds = Asset("s3://output/1.txt") diff --git a/airflow/example_dags/example_passing_params_via_test_command.py b/airflow/example_dags/example_passing_params_via_test_command.py index 2fcb8e4edab7b..7dcd963c09681 100644 --- a/airflow/example_dags/example_passing_params_via_test_command.py +++ b/airflow/example_dags/example_passing_params_via_test_command.py @@ -27,7 +27,7 @@ from airflow.decorators import task from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator @task(task_id="run_this") diff --git a/airflow/example_dags/example_sensors.py b/airflow/example_dags/example_sensors.py index 6fb564e63ae43..f639083858101 100644 --- a/airflow/example_dags/example_sensors.py +++ b/airflow/example_dags/example_sensors.py @@ -22,11 +22,11 @@ import pendulum from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator +from airflow.providers.standard.sensors.bash import BashSensor from airflow.providers.standard.sensors.time import TimeSensor, TimeSensorAsync from airflow.providers.standard.sensors.time_delta import TimeDeltaSensor, TimeDeltaSensorAsync from airflow.providers.standard.sensors.weekday import DayOfWeekSensor -from airflow.sensors.bash import BashSensor from airflow.sensors.filesystem import FileSensor from airflow.sensors.python import PythonSensor from airflow.utils.trigger_rule import TriggerRule diff --git a/airflow/example_dags/example_setup_teardown.py b/airflow/example_dags/example_setup_teardown.py index 9fab87df7568b..81994fabc202d 100644 --- a/airflow/example_dags/example_setup_teardown.py +++ b/airflow/example_dags/example_setup_teardown.py @@ -22,7 +22,7 @@ import pendulum from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.task_group import TaskGroup with DAG( diff --git a/airflow/example_dags/example_task_group.py b/airflow/example_dags/example_task_group.py index 6435a912cc419..5129ad3cc61e1 100644 --- a/airflow/example_dags/example_task_group.py +++ b/airflow/example_dags/example_task_group.py @@ -22,8 +22,8 @@ import pendulum from airflow.models.dag import DAG -from 
airflow.operators.bash import BashOperator from airflow.operators.empty import EmptyOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.task_group import TaskGroup # [START howto_task_group] diff --git a/airflow/example_dags/example_trigger_target_dag.py b/airflow/example_dags/example_trigger_target_dag.py index 7a009b8dcc6d1..3af68a25607a4 100644 --- a/airflow/example_dags/example_trigger_target_dag.py +++ b/airflow/example_dags/example_trigger_target_dag.py @@ -27,7 +27,7 @@ from airflow.decorators import task from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator @task(task_id="run_this") diff --git a/airflow/example_dags/example_xcom.py b/airflow/example_dags/example_xcom.py index fa99b91834658..2563eda77ee19 100644 --- a/airflow/example_dags/example_xcom.py +++ b/airflow/example_dags/example_xcom.py @@ -24,7 +24,7 @@ from airflow.decorators import task from airflow.models.dag import DAG from airflow.models.xcom_arg import XComArg -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator value_1 = [1, 2, 3] value_2 = {"a": "b"} diff --git a/airflow/example_dags/example_xcomargs.py b/airflow/example_dags/example_xcomargs.py index d9d0c94f4ea01..a7103dc191135 100644 --- a/airflow/example_dags/example_xcomargs.py +++ b/airflow/example_dags/example_xcomargs.py @@ -25,7 +25,7 @@ from airflow.decorators import task from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator log = logging.getLogger(__name__) diff --git a/airflow/example_dags/tutorial.py b/airflow/example_dags/tutorial.py index 0e31775c7a9a7..6e27bbcd2e5fe 100644 --- a/airflow/example_dags/tutorial.py +++ b/airflow/example_dags/tutorial.py @@ -32,7 +32,7 @@ from airflow.models.dag import DAG # Operators; we need this to operate! 
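# A version-tolerant import is a useful pattern while this module move rolls
# out; a sketch, mirroring the try/except fallback used in the edge provider's
# example DAG later in this patch, for DAGs that must run on both older and
# newer Airflow installations:
try:
    from airflow.providers.standard.operators.bash import BashOperator
except ImportError:  # older Airflow without the standard provider's bash module
    from airflow.operators.bash import BashOperator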
-from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator # [END import_module] diff --git a/airflow/providers/celery/executors/celery_executor_utils.py b/airflow/providers/celery/executors/celery_executor_utils.py index 8f25f040c90ad..a7aa6a87ea2cf 100644 --- a/airflow/providers/celery/executors/celery_executor_utils.py +++ b/airflow/providers/celery/executors/celery_executor_utils.py @@ -111,8 +111,12 @@ def on_celery_import_modules(*args, **kwargs): import airflow.jobs.local_task_job_runner import airflow.macros - import airflow.operators.bash - import airflow.operators.python # noqa: F401 + import airflow.operators.python + + try: + import airflow.providers.standard.operators.bash + except ImportError: + import airflow.operators.bash # noqa: F401 with contextlib.suppress(ImportError): import numpy # noqa: F401 diff --git a/airflow/providers/edge/example_dags/integration_test.py b/airflow/providers/edge/example_dags/integration_test.py index d6074abd30ddc..0aad61d354c05 100644 --- a/airflow/providers/edge/example_dags/integration_test.py +++ b/airflow/providers/edge/example_dags/integration_test.py @@ -32,10 +32,14 @@ from airflow.models.dag import DAG from airflow.models.param import Param from airflow.models.variable import Variable -from airflow.operators.bash import BashOperator from airflow.operators.empty import EmptyOperator from airflow.operators.python import PythonOperator +try: + from airflow.providers.standard.operators.bash import BashOperator +except ImportError: + from airflow.operators.bash import BashOperator # type: ignore[no-redef,attr-defined] + with DAG( dag_id="integration_test", dag_display_name="Integration Test", diff --git a/airflow/providers/openlineage/provider.yaml b/airflow/providers/openlineage/provider.yaml index b249ff46c8591..5b08ac6a77ae7 100644 --- a/airflow/providers/openlineage/provider.yaml +++ b/airflow/providers/openlineage/provider.yaml @@ -84,7 +84,8 @@ config: Exclude some Operators from emitting OpenLineage events by passing a string of semicolon separated full import paths of Operators to disable. 
type: string - example: "airflow.operators.bash.BashOperator;airflow.operators.python.PythonOperator" + example: "airflow.providers.standard.operators.bash.BashOperator; + airflow.operators.python.PythonOperator" default: "" version_added: 1.1.0 selective_enable: diff --git a/airflow/operators/bash.py b/airflow/providers/standard/operators/bash.py similarity index 100% rename from airflow/operators/bash.py rename to airflow/providers/standard/operators/bash.py diff --git a/airflow/providers/standard/provider.yaml b/airflow/providers/standard/provider.yaml index 83d8acf0a68b3..2d4c4f29bef5c 100644 --- a/airflow/providers/standard/provider.yaml +++ b/airflow/providers/standard/provider.yaml @@ -42,6 +42,7 @@ operators: python-modules: - airflow.providers.standard.operators.datetime - airflow.providers.standard.operators.weekday + - airflow.providers.standard.operators.bash sensors: - integration-name: Standard @@ -50,3 +51,4 @@ sensors: - airflow.providers.standard.sensors.time_delta - airflow.providers.standard.sensors.time - airflow.providers.standard.sensors.weekday + - airflow.providers.standard.sensors.bash diff --git a/airflow/sensors/bash.py b/airflow/providers/standard/sensors/bash.py similarity index 100% rename from airflow/sensors/bash.py rename to airflow/providers/standard/sensors/bash.py diff --git a/dev/breeze/tests/test_selective_checks.py b/dev/breeze/tests/test_selective_checks.py index 4b28c7a0a63d8..3b58a45ae1b9a 100644 --- a/dev/breeze/tests/test_selective_checks.py +++ b/dev/breeze/tests/test_selective_checks.py @@ -703,9 +703,9 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): id="Only Always and common providers tests should run when only common.io and tests/always changed", ), pytest.param( - ("airflow/operators/bash.py",), + ("airflow/providers/standard/operators/bash.py",), { - "affected-providers-list-as-string": None, + "affected-providers-list-as-string": "celery edge standard", "all-python-versions": "['3.9']", "all-python-versions-list-as-string": "3.9", "python-versions": "['3.9']", @@ -717,14 +717,14 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "run-amazon-tests": "false", "docs-build": "true", "run-kubernetes-tests": "false", - "skip-pre-commits": "check-provider-yaml-valid,identity,lint-helm-chart,mypy-airflow,mypy-dev,mypy-docs,mypy-providers," + "skip-pre-commits": "identity,lint-helm-chart,mypy-airflow,mypy-dev,mypy-docs,mypy-providers," "ts-compile-format-lint-ui,ts-compile-format-lint-www", "upgrade-to-newer-dependencies": "false", - "parallel-test-types-list-as-string": "Always Core Operators Serialization", + "parallel-test-types-list-as-string": "Always Core Providers[celery,edge,standard] Serialization", "needs-mypy": "true", - "mypy-folders": "['airflow']", + "mypy-folders": "['providers']", }, - id="Force Core and Serialization tests to run when airflow bash.py changed", + id="Providers standard tests and Serialization tests to run when airflow bash.py changed", ), pytest.param( ("tests/operators/bash.py",), @@ -1720,7 +1720,7 @@ def test_upgrade_to_newer_dependencies( ), pytest.param( ("airflow/providers/celery/file.py",), - {"docs-list-as-string": "apache-airflow celery cncf.kubernetes"}, + {"docs-list-as-string": "apache-airflow celery cncf.kubernetes standard"}, id="Celery python files changed", ), pytest.param( diff --git a/dev/perf/dags/elastic_dag.py b/dev/perf/dags/elastic_dag.py index e0adcdf5caf11..30bfc9acf992b 100644 --- a/dev/perf/dags/elastic_dag.py +++ 
b/dev/perf/dags/elastic_dag.py @@ -24,7 +24,7 @@ from airflow.models.baseoperator import chain from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator # DAG File used in performance tests. Its shape can be configured by environment variables. RE_TIME_DELTA = re.compile( diff --git a/dev/perf/dags/perf_dag_2.py b/dev/perf/dags/perf_dag_2.py index 641bb7565c8f3..592bbe6087838 100644 --- a/dev/perf/dags/perf_dag_2.py +++ b/dev/perf/dags/perf_dag_2.py @@ -24,7 +24,7 @@ import datetime from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator args = { "owner": "airflow", diff --git a/docs/apache-airflow-providers-amazon/notifications/chime_notifier_howto_guide.rst b/docs/apache-airflow-providers-amazon/notifications/chime_notifier_howto_guide.rst index a52540fe78282..e15c3a8c0c8e4 100644 --- a/docs/apache-airflow-providers-amazon/notifications/chime_notifier_howto_guide.rst +++ b/docs/apache-airflow-providers-amazon/notifications/chime_notifier_howto_guide.rst @@ -31,7 +31,7 @@ Example Code: from datetime import datetime from airflow import DAG - from airflow.operators.bash import BashOperator + from airflow.providers.standard.operators.bash import BashOperator from airflow.providers.amazon.aws.notifications.chime import send_chime_notification with DAG( diff --git a/docs/apache-airflow-providers-amazon/notifications/sns.rst b/docs/apache-airflow-providers-amazon/notifications/sns.rst index bbaad4f814712..262cd966ae418 100644 --- a/docs/apache-airflow-providers-amazon/notifications/sns.rst +++ b/docs/apache-airflow-providers-amazon/notifications/sns.rst @@ -33,7 +33,7 @@ Example Code: from datetime import datetime from airflow import DAG - from airflow.operators.bash import BashOperator + from airflow.providers.standard.operators.bash import BashOperator from airflow.providers.amazon.aws.notifications.sns import send_sns_notification dag_failure_sns_notification = send_sns_notification( diff --git a/docs/apache-airflow-providers-amazon/notifications/sqs.rst b/docs/apache-airflow-providers-amazon/notifications/sqs.rst index 6951caa9fdd67..d74a2477d62ca 100644 --- a/docs/apache-airflow-providers-amazon/notifications/sqs.rst +++ b/docs/apache-airflow-providers-amazon/notifications/sqs.rst @@ -33,7 +33,7 @@ Example Code: from datetime import datetime, timezone from airflow import DAG - from airflow.operators.bash import BashOperator + from airflow.providers.standard.operators.bash import BashOperator from airflow.providers.amazon.aws.notifications.sqs import send_sqs_notification dag_failure_sqs_notification = send_sqs_notification( diff --git a/docs/apache-airflow-providers-apprise/notifications/apprise_notifier_howto_guide.rst b/docs/apache-airflow-providers-apprise/notifications/apprise_notifier_howto_guide.rst index 777a3d46a3b2c..2a0aeaaa10764 100644 --- a/docs/apache-airflow-providers-apprise/notifications/apprise_notifier_howto_guide.rst +++ b/docs/apache-airflow-providers-apprise/notifications/apprise_notifier_howto_guide.rst @@ -30,7 +30,7 @@ Example Code: from datetime import datetime from airflow import DAG - from airflow.operators.bash import BashOperator + from airflow.providers.standard.operators.bash import BashOperator from airflow.providers.apprise.notifications.apprise import send_apprise_notification from apprise import NotifyType diff --git 
a/docs/apache-airflow-providers-atlassian-jira/notifications/jira-notifier-howto-guide.rst b/docs/apache-airflow-providers-atlassian-jira/notifications/jira-notifier-howto-guide.rst index e0ed12558145f..a5617b9035de0 100644 --- a/docs/apache-airflow-providers-atlassian-jira/notifications/jira-notifier-howto-guide.rst +++ b/docs/apache-airflow-providers-atlassian-jira/notifications/jira-notifier-howto-guide.rst @@ -31,7 +31,7 @@ Example Code from datetime import datetime from airflow import DAG - from airflow.operators.bash import BashOperator + from airflow.providers.standard.operators.bash import BashOperator from airflow.providers.atlassian.jira.notifications.jira import send_jira_notification with DAG( diff --git a/docs/apache-airflow-providers-google/operators/cloud/mlengine.rst b/docs/apache-airflow-providers-google/operators/cloud/mlengine.rst index 9c71e885d8d6a..f64705e1c267c 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/mlengine.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/mlengine.rst @@ -96,7 +96,7 @@ instead. You can use :ref:`Jinja templating ` with the ``project_id`` and ``model`` fields to dynamically determine their values. The result are saved to :ref:`XCom `, allowing them to be used by other operators. In this case, the -:class:`~airflow.operators.bash.BashOperator` is used to print the model information. +:class:`~airflow.providers.standard.operators.bash.BashOperator` is used to print the model information. .. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py :language: python diff --git a/docs/apache-airflow-providers-google/operators/cloud/pubsub.rst b/docs/apache-airflow-providers-google/operators/cloud/pubsub.rst index 7d9a6dd5ff2ab..8fb497a14f01e 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/pubsub.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/pubsub.rst @@ -101,7 +101,7 @@ Also for this action you can use sensor in the deferrable mode: :start-after: [START howto_operator_gcp_pubsub_pull_message_with_operator] :end-before: [END howto_operator_gcp_pubsub_pull_message_with_operator] -To pull messages from XCom use the :class:`~airflow.operators.bash.BashOperator`. +To pull messages from XCom use the :class:`~airflow.providers.standard.operators.bash.BashOperator`. .. 
exampleinclude:: /../../tests/system/providers/google/cloud/pubsub/example_pubsub.py :language: python diff --git a/docs/apache-airflow-providers-openlineage/guides/developer.rst b/docs/apache-airflow-providers-openlineage/guides/developer.rst index c2a5ffdc8fda4..ccab215fc1846 100644 --- a/docs/apache-airflow-providers-openlineage/guides/developer.rst +++ b/docs/apache-airflow-providers-openlineage/guides/developer.rst @@ -390,7 +390,7 @@ An Operator inside the Airflow DAG can be annotated with inlets and outlets like import pendulum from airflow import DAG - from airflow.operators.bash import BashOperator + from airflow.providers.standard.operators.bash import BashOperator from airflow.lineage.entities import Table, File, Column, User diff --git a/docs/apache-airflow-providers-openlineage/guides/user.rst b/docs/apache-airflow-providers-openlineage/guides/user.rst index 2c299b8c6d39e..4f95253e1f242 100644 --- a/docs/apache-airflow-providers-openlineage/guides/user.rst +++ b/docs/apache-airflow-providers-openlineage/guides/user.rst @@ -257,13 +257,13 @@ full import paths of Airflow Operators to disable as ``disabled_for_operators`` [openlineage] transport = {"type": "http", "url": "http://example.com:5000", "endpoint": "api/v1/lineage"} - disabled_for_operators = 'airflow.operators.bash.BashOperator;airflow.operators.python.PythonOperator' + disabled_for_operators = 'airflow.providers.standard.operators.bash.BashOperator;airflow.operators.python.PythonOperator' ``AIRFLOW__OPENLINEAGE__DISABLED_FOR_OPERATORS`` environment variable is an equivalent. .. code-block:: ini - AIRFLOW__OPENLINEAGE__DISABLED_FOR_OPERATORS='airflow.operators.bash.BashOperator;airflow.operators.python.PythonOperator' + AIRFLOW__OPENLINEAGE__DISABLED_FOR_OPERATORS='airflow.providers.standard.operators.bash.BashOperator;airflow.operators.python.PythonOperator' Full Task Info ^^^^^^^^^^^^^^ diff --git a/docs/apache-airflow-providers-pagerduty/notifications/pagerduty_notifier_howto_guide.rst b/docs/apache-airflow-providers-pagerduty/notifications/pagerduty_notifier_howto_guide.rst index d16f9b2b9e48a..658054bd0a5ec 100644 --- a/docs/apache-airflow-providers-pagerduty/notifications/pagerduty_notifier_howto_guide.rst +++ b/docs/apache-airflow-providers-pagerduty/notifications/pagerduty_notifier_howto_guide.rst @@ -31,7 +31,7 @@ Example Code: from datetime import datetime from airflow import DAG - from airflow.operators.bash import BashOperator + from airflow.providers.standard.operators.bash import BashOperator from airflow.providers.pagerduty.notifications.pagerduty import send_pagerduty_notification with DAG( diff --git a/docs/apache-airflow-providers-slack/notifications/slack_notifier_howto_guide.rst b/docs/apache-airflow-providers-slack/notifications/slack_notifier_howto_guide.rst index a4f891f8a57bb..3b6a1e7879924 100644 --- a/docs/apache-airflow-providers-slack/notifications/slack_notifier_howto_guide.rst +++ b/docs/apache-airflow-providers-slack/notifications/slack_notifier_howto_guide.rst @@ -31,7 +31,7 @@ Example Code: from datetime import datetime from airflow import DAG - from airflow.operators.bash import BashOperator + from airflow.providers.standard.operators.bash import BashOperator from airflow.providers.slack.notifications.slack import send_slack_notification with DAG( diff --git a/docs/apache-airflow-providers-slack/notifications/slackwebhook_notifier_howto_guide.rst b/docs/apache-airflow-providers-slack/notifications/slackwebhook_notifier_howto_guide.rst index 66ced818a7d18..e6ef3ab41409c 100644 --- 
a/docs/apache-airflow-providers-slack/notifications/slackwebhook_notifier_howto_guide.rst +++ b/docs/apache-airflow-providers-slack/notifications/slackwebhook_notifier_howto_guide.rst @@ -32,7 +32,7 @@ Example Code: from datetime import datetime, timezone from airflow import DAG - from airflow.operators.bash import BashOperator + from airflow.providers.standard.operators.bash import BashOperator from airflow.providers.slack.notifications.slack_webhook import send_slack_webhook_notification dag_failure_slack_webhook_notification = send_slack_webhook_notification( diff --git a/docs/apache-airflow-providers-smtp/notifications/smtp_notifier_howto_guide.rst b/docs/apache-airflow-providers-smtp/notifications/smtp_notifier_howto_guide.rst index 4cb1bf310e03d..e47f9e340c93b 100644 --- a/docs/apache-airflow-providers-smtp/notifications/smtp_notifier_howto_guide.rst +++ b/docs/apache-airflow-providers-smtp/notifications/smtp_notifier_howto_guide.rst @@ -31,7 +31,7 @@ Example Code: from datetime import datetime from airflow import DAG - from airflow.operators.bash import BashOperator + from airflow.providers.standard.operators.bash import BashOperator from airflow.providers.smtp.notifications.smtp import send_smtp_notification with DAG( diff --git a/docs/apache-airflow/administration-and-deployment/lineage.rst b/docs/apache-airflow/administration-and-deployment/lineage.rst index b274809175c03..3740e8b56f937 100644 --- a/docs/apache-airflow/administration-and-deployment/lineage.rst +++ b/docs/apache-airflow/administration-and-deployment/lineage.rst @@ -36,7 +36,7 @@ works. from airflow.lineage import AUTO from airflow.lineage.entities import File from airflow.models import DAG - from airflow.operators.bash import BashOperator + from airflow.providers.standard.operators.bash import BashOperator from airflow.operators.empty import EmptyOperator FILE_CATEGORIES = ["CAT1", "CAT2", "CAT3"] diff --git a/docs/apache-airflow/best-practices.rst b/docs/apache-airflow/best-practices.rst index 80a5996f36768..466f546ff7147 100644 --- a/docs/apache-airflow/best-practices.rst +++ b/docs/apache-airflow/best-practices.rst @@ -480,7 +480,7 @@ It's easier to grab the concept with an example. Let's say that we have the foll from airflow import DAG from airflow.decorators import task from airflow.exceptions import AirflowException - from airflow.operators.bash import BashOperator + from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule diff --git a/docs/apache-airflow/core-concepts/dag-run.rst b/docs/apache-airflow/core-concepts/dag-run.rst index 0621d3f771e2c..97fd4e28a7b58 100644 --- a/docs/apache-airflow/core-concepts/dag-run.rst +++ b/docs/apache-airflow/core-concepts/dag-run.rst @@ -101,7 +101,7 @@ in the configuration file. 
When turned off, the scheduler creates a DAG run only https://github.com/apache/airflow/blob/main/airflow/example_dags/tutorial.py """ from airflow.models.dag import DAG - from airflow.operators.bash import BashOperator + from airflow.providers.standard.operators.bash import BashOperator import datetime import pendulum @@ -241,7 +241,7 @@ Example of a parameterized DAG: import pendulum from airflow import DAG - from airflow.operators.bash import BashOperator + from airflow.providers.standard.operators.bash import BashOperator dag = DAG( "example_parameterized_dag", diff --git a/docs/apache-airflow/core-concepts/dags.rst b/docs/apache-airflow/core-concepts/dags.rst index f9dc7d64c72e0..64726e08010e1 100644 --- a/docs/apache-airflow/core-concepts/dags.rst +++ b/docs/apache-airflow/core-concepts/dags.rst @@ -574,7 +574,7 @@ TaskGroup also supports ``default_args`` like DAG, it will overwrite the ``defau from airflow import DAG from airflow.decorators import task_group - from airflow.operators.bash import BashOperator + from airflow.providers.standard.operators.bash import BashOperator from airflow.operators.empty import EmptyOperator with DAG( diff --git a/docs/apache-airflow/core-concepts/operators.rst b/docs/apache-airflow/core-concepts/operators.rst index 354697c308537..6a0825df28773 100644 --- a/docs/apache-airflow/core-concepts/operators.rst +++ b/docs/apache-airflow/core-concepts/operators.rst @@ -28,7 +28,7 @@ An Operator is conceptually a template for a predefined :doc:`Task `, tha Airflow has a very extensive set of operators available, with some built-in to the core or pre-installed providers. Some popular operators from core include: -- :class:`~airflow.operators.bash.BashOperator` - executes a bash command +- :class:`~airflow.providers.standard.operators.bash.BashOperator` - executes a bash command - :class:`~airflow.operators.python.PythonOperator` - calls an arbitrary Python function - :class:`~airflow.operators.email.EmailOperator` - sends an email - Use the ``@task`` decorator to execute an arbitrary Python function. It doesn't support rendering jinja templates passed as arguments. diff --git a/docs/apache-airflow/core-concepts/tasks.rst b/docs/apache-airflow/core-concepts/tasks.rst index ad03283ef772d..5adfe8be46024 100644 --- a/docs/apache-airflow/core-concepts/tasks.rst +++ b/docs/apache-airflow/core-concepts/tasks.rst @@ -236,7 +236,7 @@ If you'd like to reproduce zombie tasks for development/testing processes, follo .. 
code-block:: python from airflow.decorators import dag - from airflow.operators.bash import BashOperator + from airflow.providers.standard.operators.bash import BashOperator from datetime import datetime diff --git a/docs/apache-airflow/howto/notifications.rst b/docs/apache-airflow/howto/notifications.rst index c477ec1d3c173..993a36b389423 100644 --- a/docs/apache-airflow/howto/notifications.rst +++ b/docs/apache-airflow/howto/notifications.rst @@ -59,7 +59,7 @@ Here's an example of using the above notifier: from datetime import datetime from airflow.models.dag import DAG - from airflow.operators.bash import BashOperator + from airflow.providers.standard.operators.bash import BashOperator from myprovider.notifier import MyNotifier diff --git a/docs/apache-airflow/howto/operator/bash.rst b/docs/apache-airflow/howto/operator/bash.rst index daf430fa14cde..e4af9bcad6b83 100644 --- a/docs/apache-airflow/howto/operator/bash.rst +++ b/docs/apache-airflow/howto/operator/bash.rst @@ -22,7 +22,7 @@ BashOperator ============ -Use the :class:`~airflow.operators.bash.BashOperator` to execute +Use the :class:`~airflow.providers.standard.operators.bash.BashOperator` to execute commands in a `Bash <https://www.gnu.org/software/bash/>`__ shell. The Bash command or script to execute is determined by: @@ -390,7 +390,7 @@ There are numerous possibilities with this type of pre-execution enrichment. BashSensor ========== -Use the :class:`~airflow.sensors.bash.BashSensor` to use arbitrary command for sensing. The command +Use the :class:`~airflow.providers.standard.sensors.bash.BashSensor` to use arbitrary command for sensing. The command should return 0 when it succeeds, any other value otherwise. .. exampleinclude:: /../../airflow/example_dags/example_sensors.py diff --git a/docs/apache-airflow/index.rst b/docs/apache-airflow/index.rst index 44dcd9a3bd36c..38d62ecd04a72 100644 --- a/docs/apache-airflow/index.rst +++ b/docs/apache-airflow/index.rst @@ -41,7 +41,7 @@ Take a look at the following snippet of code: from airflow import DAG from airflow.decorators import task - from airflow.operators.bash import BashOperator + from airflow.providers.standard.operators.bash import BashOperator # A DAG represents a workflow, a collection of tasks with DAG(dag_id="demo", start_date=datetime(2022, 1, 1), schedule="0 0 * * *") as dag: diff --git a/docs/apache-airflow/operators-and-hooks-ref.rst b/docs/apache-airflow/operators-and-hooks-ref.rst index 16b74305a958b..c82a4f3a66d73 100644 --- a/docs/apache-airflow/operators-and-hooks-ref.rst +++ b/docs/apache-airflow/operators-and-hooks-ref.rst @@ -50,7 +50,7 @@ For details see: :doc:`apache-airflow-providers:operators-and-hooks-ref/index`. * - Operators - Guides - * - :mod:`airflow.operators.bash` + * - :mod:`airflow.providers.standard.operators.bash` - :doc:`How to use <howto/operator/bash>` * - :mod:`airflow.operators.branch` @@ -82,7 +82,7 @@ For details see: :doc:`apache-airflow-providers:operators-and-hooks-ref/index`. * - Sensors - Guides - * - :mod:`airflow.sensors.bash` + * - :mod:`airflow.providers.standard.sensors.bash` - :ref:`How to use <howto/operator:BashSensor>` * - :mod:`airflow.sensors.external_task` diff --git a/docs/apache-airflow/tutorial/taskflow.rst b/docs/apache-airflow/tutorial/taskflow.rst index c77debab8f328..aac04f9b53454 100644 --- a/docs/apache-airflow/tutorial/taskflow.rst +++ b/docs/apache-airflow/tutorial/taskflow.rst @@ -437,7 +437,7 @@ the parameter value is used.
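
Every hunk in this series applies the same mechanical rename: BashOperator and BashSensor move from airflow.operators.bash / airflow.sensors.bash into the standard provider package. As a minimal illustrative sketch only (not part of this patch; the dag_id and bash command below are made up), code that must run on Airflow versions from both sides of the rename can fall back to the old path, in the same spirit as the tests.test_utils.compat shims used later in this series:

    import datetime

    try:
        # Newer Airflow: BashOperator ships with the "standard" provider.
        from airflow.providers.standard.operators.bash import BashOperator
    except ImportError:
        # Older Airflow: fall back to the core import path this patch retires.
        from airflow.operators.bash import BashOperator

    from airflow.models.dag import DAG

    with DAG(
        dag_id="compat_import_demo",  # hypothetical name, for illustration only
        start_date=datetime.datetime(2024, 1, 1),
        schedule=None,
        catchup=False,
    ):
        BashOperator(task_id="hello", bash_command="echo hello")
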
Adding dependencies between decorated and traditional tasks ----------------------------------------------------------- The above tutorial shows how to create dependencies between TaskFlow functions. However, dependencies can also -be set between traditional tasks (such as :class:`~airflow.operators.bash.BashOperator` +be set between traditional tasks (such as :class:`~airflow.providers.standard.operators.bash.BashOperator` or :class:`~airflow.sensors.filesystem.FileSensor`) and TaskFlow functions. Building this dependency is shown in the code below: diff --git a/docs/exts/templates/openlineage.rst.jinja2 b/docs/exts/templates/openlineage.rst.jinja2 index 7dffc175f84b2..dfac543cbe7f1 100644 --- a/docs/exts/templates/openlineage.rst.jinja2 +++ b/docs/exts/templates/openlineage.rst.jinja2 @@ -22,7 +22,7 @@ At the moment, two core operators supports OpenLineage. These operators function capable of running any code, which might limit the extent of lineage extraction. - :class:`~airflow.operators.python.PythonOperator` (via :class:`airflow.providers.openlineage.extractors.python.PythonExtractor`) -- :class:`~airflow.operators.bash.BashOperator` (via :class:`airflow.providers.openlineage.extractors.bash.BashExtractor`) +- :class:`~airflow.providers.standard.operators.bash.BashOperator` (via :class:`airflow.providers.openlineage.extractors.bash.BashExtractor`) :class:`~airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator` diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json index b1ff0f7dcfab7..57bca636ed605 100644 --- a/generated/provider_dependencies.json +++ b/generated/provider_dependencies.json @@ -344,7 +344,8 @@ "devel-deps": [], "plugins": [], "cross-providers-deps": [ - "cncf.kubernetes" + "cncf.kubernetes", + "standard" ], "excluded-python-versions": [], "state": "ready" @@ -532,7 +533,9 @@ "plugin-class": "airflow.providers.edge.plugins.edge_executor_plugin.EdgeExecutorPlugin" } ], - "cross-providers-deps": [], + "cross-providers-deps": [ + "standard" + ], "excluded-python-versions": [], "state": "not-ready" }, diff --git a/kubernetes_tests/test_kubernetes_executor.py b/kubernetes_tests/test_kubernetes_executor.py index a270243bfac78..42b181b443041 100644 --- a/kubernetes_tests/test_kubernetes_executor.py +++ b/kubernetes_tests/test_kubernetes_executor.py @@ -20,7 +20,10 @@ import pytest -from kubernetes_tests.test_base import EXECUTOR, BaseK8STest # isort:skip (needed to workaround isort bug) +from kubernetes_tests.test_base import ( + EXECUTOR, + BaseK8STest, # isort:skip (needed to workaround isort bug) +) @pytest.mark.skipif(EXECUTOR != "KubernetesExecutor", reason="Only runs on KubernetesExecutor") diff --git a/kubernetes_tests/test_other_executors.py b/kubernetes_tests/test_other_executors.py index 97b7e3df728ee..3a1aea16f1291 100644 --- a/kubernetes_tests/test_other_executors.py +++ b/kubernetes_tests/test_other_executors.py @@ -20,7 +20,10 @@ import pytest -from kubernetes_tests.test_base import EXECUTOR, BaseK8STest # isort:skip (needed to workaround isort bug) +from kubernetes_tests.test_base import ( + EXECUTOR, + BaseK8STest, # isort:skip (needed to workaround isort bug) +) # These tests are here because only KubernetesExecutor can run the tests in diff --git a/tests/callbacks/test_callback_requests.py b/tests/callbacks/test_callback_requests.py index 5992ee6fbbf70..7bbe41387750c 100644 --- a/tests/callbacks/test_callback_requests.py +++ b/tests/callbacks/test_callback_requests.py @@ -27,7 +27,7 @@ ) from airflow.models.dag 
import DAG from airflow.models.taskinstance import SimpleTaskInstance, TaskInstance -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils import timezone from airflow.utils.state import State from airflow.utils.types import DagRunType @@ -100,7 +100,7 @@ def test_simple_ti_roundtrip_exec_config_pod(self): from airflow.callbacks.callback_requests import TaskCallbackRequest from airflow.models import TaskInstance from airflow.models.taskinstance import SimpleTaskInstance - from airflow.operators.bash import BashOperator + from airflow.providers.standard.operators.bash import BashOperator test_pod = k8s.V1Pod(metadata=k8s.V1ObjectMeta(name="hello", namespace="ns")) op = BashOperator(task_id="hi", executor_config={"pod_override": test_pod}, bash_command="hi") @@ -115,7 +115,7 @@ def test_simple_ti_roundtrip_dates(self, dag_maker): from airflow.callbacks.callback_requests import TaskCallbackRequest from airflow.models import TaskInstance from airflow.models.taskinstance import SimpleTaskInstance - from airflow.operators.bash import BashOperator + from airflow.providers.standard.operators.bash import BashOperator with dag_maker(schedule=timedelta(weeks=1), serialized=True): op = BashOperator(task_id="hi", bash_command="hi") diff --git a/tests/cli/commands/test_task_command.py b/tests/cli/commands/test_task_command.py index 36cbcc85a7214..3397005b80eb0 100644 --- a/tests/cli/commands/test_task_command.py +++ b/tests/cli/commands/test_task_command.py @@ -45,8 +45,8 @@ from airflow.executors.local_executor import LocalExecutor from airflow.models import DagBag, DagRun, Pool, TaskInstance from airflow.models.serialized_dag import SerializedDagModel -from airflow.operators.bash import BashOperator from airflow.operators.empty import EmptyOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils import timezone from airflow.utils.session import create_session from airflow.utils.state import State, TaskInstanceState diff --git a/tests/core/test_core.py b/tests/core/test_core.py index a75428b33a18b..d44235f955243 100644 --- a/tests/core/test_core.py +++ b/tests/core/test_core.py @@ -27,9 +27,9 @@ from airflow.exceptions import AirflowTaskTimeout from airflow.models import DagRun, TaskFail, TaskInstance from airflow.models.baseoperator import BaseOperator -from airflow.operators.bash import BashOperator from airflow.operators.empty import EmptyOperator from airflow.operators.python import PythonOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_task_fail diff --git a/tests/dags/subdir2/test_dont_ignore_this.py b/tests/dags/subdir2/test_dont_ignore_this.py index 72c1796a424b2..07f04293d7d54 100644 --- a/tests/dags/subdir2/test_dont_ignore_this.py +++ b/tests/dags/subdir2/test_dont_ignore_this.py @@ -20,7 +20,7 @@ from datetime import datetime from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator DEFAULT_DATE = datetime(2019, 12, 1) diff --git a/tests/dags/test_assets.py b/tests/dags/test_assets.py index a4ecd6aad4a6a..014ae6fd0ca9f 100644 --- a/tests/dags/test_assets.py +++ b/tests/dags/test_assets.py @@ -22,8 +22,8 @@ from airflow.assets import Asset from airflow.exceptions import AirflowFailException, 
AirflowSkipException from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.operators.python import PythonOperator +from airflow.providers.standard.operators.bash import BashOperator skip_task_dag_dataset = Asset("s3://dag_with_skip_task/output_1.txt", extra={"hi": "bye"}) fail_task_dag_dataset = Asset("s3://dag_with_fail_task/output_1.txt", extra={"hi": "bye"}) diff --git a/tests/dags/test_backfill_with_upstream_failed_task.py b/tests/dags/test_backfill_with_upstream_failed_task.py index d2cb6353bfaa3..865b0da4ff426 100644 --- a/tests/dags/test_backfill_with_upstream_failed_task.py +++ b/tests/dags/test_backfill_with_upstream_failed_task.py @@ -20,7 +20,7 @@ import datetime from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator dag = DAG( dag_id="test_backfill_with_upstream_failed_task", diff --git a/tests/dags/test_default_impersonation.py b/tests/dags/test_default_impersonation.py index 468b7dce072dd..4bee30457b7bd 100644 --- a/tests/dags/test_default_impersonation.py +++ b/tests/dags/test_default_impersonation.py @@ -21,7 +21,7 @@ from datetime import datetime from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator DEFAULT_DATE = datetime(2016, 1, 1) diff --git a/tests/dags/test_example_bash_operator.py b/tests/dags/test_example_bash_operator.py index eb472b8011ebd..52126f0e10206 100644 --- a/tests/dags/test_example_bash_operator.py +++ b/tests/dags/test_example_bash_operator.py @@ -20,8 +20,8 @@ import datetime from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.operators.empty import EmptyOperator +from airflow.providers.standard.operators.bash import BashOperator dag = DAG( dag_id="test_example_bash_operator", diff --git a/tests/dags/test_failing.py b/tests/dags/test_failing.py index 28e2fb5881987..646665a8802ef 100644 --- a/tests/dags/test_failing.py +++ b/tests/dags/test_failing.py @@ -20,7 +20,7 @@ import datetime from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator dag = DAG( dag_id="test_failing_bash_operator", diff --git a/tests/dags/test_heartbeat_failed_fast.py b/tests/dags/test_heartbeat_failed_fast.py index aee7a67030585..890756ef2017d 100644 --- a/tests/dags/test_heartbeat_failed_fast.py +++ b/tests/dags/test_heartbeat_failed_fast.py @@ -20,7 +20,7 @@ from datetime import datetime from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator DEFAULT_DATE = datetime(2016, 1, 1) diff --git a/tests/dags/test_impersonation.py b/tests/dags/test_impersonation.py index 33a3c89d328d9..6c2ca2d810026 100644 --- a/tests/dags/test_impersonation.py +++ b/tests/dags/test_impersonation.py @@ -21,7 +21,7 @@ from datetime import datetime from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator DEFAULT_DATE = datetime(2016, 1, 1) diff --git a/tests/dags/test_miscellaneous.py b/tests/dags/test_miscellaneous.py index c19277a617571..4a2c6b56a365d 100644 --- a/tests/dags/test_miscellaneous.py +++ b/tests/dags/test_miscellaneous.py @@ -22,8 +22,8 @@ import datetime from airflow.models.dag import DAG -from airflow.operators.bash import 
BashOperator from airflow.operators.empty import EmptyOperator +from tests.test_utils.compat import BashOperator args = { "owner": "airflow", diff --git a/tests/dags/test_multiple_dags.py b/tests/dags/test_multiple_dags.py index 5801084fab712..27f159bfb1272 100644 --- a/tests/dags/test_multiple_dags.py +++ b/tests/dags/test_multiple_dags.py @@ -20,7 +20,7 @@ import datetime from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator args = {"owner": "airflow", "retries": 3, "start_date": datetime.datetime(2022, 1, 1)} diff --git a/tests/dags/test_no_impersonation.py b/tests/dags/test_no_impersonation.py index 2a75d5321473c..22b47fcc878c8 100644 --- a/tests/dags/test_no_impersonation.py +++ b/tests/dags/test_no_impersonation.py @@ -21,7 +21,7 @@ from datetime import datetime from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator DEFAULT_DATE = datetime(2016, 1, 1) diff --git a/tests/dags/test_on_failure_callback.py b/tests/dags/test_on_failure_callback.py index e2f4ab9027a8c..f6765a3698097 100644 --- a/tests/dags/test_on_failure_callback.py +++ b/tests/dags/test_on_failure_callback.py @@ -21,8 +21,8 @@ from airflow.exceptions import AirflowFailException from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.operators.python import PythonOperator +from airflow.providers.standard.operators.bash import BashOperator DEFAULT_DATE = datetime(2016, 1, 1) diff --git a/tests/dags/test_retry_handling_job.py b/tests/dags/test_retry_handling_job.py index 7040e8c8756f8..ede9c4c6ace50 100644 --- a/tests/dags/test_retry_handling_job.py +++ b/tests/dags/test_retry_handling_job.py @@ -20,7 +20,7 @@ from datetime import datetime, timedelta from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator default_args = { "owner": "airflow", diff --git a/tests/dags/test_sensor.py b/tests/dags/test_sensor.py index d023949e31a98..07c9cc7efdffb 100644 --- a/tests/dags/test_sensor.py +++ b/tests/dags/test_sensor.py @@ -20,8 +20,8 @@ from airflow.decorators import task from airflow.models.dag import DAG -from airflow.providers.standard.sensors.date_time import DateTimeSensor from airflow.utils import timezone +from tests.test_utils.compat import DateTimeSensor with DAG( dag_id="test_sensor", start_date=datetime.datetime(2022, 1, 1), catchup=False, schedule="@once" diff --git a/tests/decorators/test_setup_teardown.py b/tests/decorators/test_setup_teardown.py index 13451ba379eca..1f2a3dcdbc630 100644 --- a/tests/decorators/test_setup_teardown.py +++ b/tests/decorators/test_setup_teardown.py @@ -22,7 +22,7 @@ from airflow.decorators import setup, task, task_group, teardown from airflow.decorators.setup_teardown import context_wrapper from airflow.exceptions import AirflowException -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator pytestmark = pytest.mark.db_test diff --git a/tests/integration/executors/test_celery_executor.py b/tests/integration/executors/test_celery_executor.py index 9c7fe96ff186d..4ec1cc458c3a5 100644 --- a/tests/integration/executors/test_celery_executor.py +++ b/tests/integration/executors/test_celery_executor.py @@ -43,7 +43,7 @@ from airflow.models.dag import DAG from airflow.models.taskinstance import 
SimpleTaskInstance, TaskInstance from airflow.models.taskinstancekey import TaskInstanceKey -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.state import State, TaskInstanceState from tests.test_utils import db diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py index b4e4c10cff456..d26369f5d728e 100644 --- a/tests/jobs/test_scheduler_job.py +++ b/tests/jobs/test_scheduler_job.py @@ -60,8 +60,8 @@ from airflow.models.pool import Pool from airflow.models.serialized_dag import SerializedDagModel from airflow.models.taskinstance import SimpleTaskInstance, TaskInstance, TaskInstanceKey -from airflow.operators.bash import BashOperator from airflow.operators.empty import EmptyOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.serialization.serialized_objects import SerializedDAG from airflow.utils import timezone from airflow.utils.file import list_py_file_paths diff --git a/tests/listeners/test_listeners.py b/tests/listeners/test_listeners.py index 3c34ab0ff8ab2..29ec25a9a8d2a 100644 --- a/tests/listeners/test_listeners.py +++ b/tests/listeners/test_listeners.py @@ -25,7 +25,7 @@ from airflow.exceptions import AirflowException from airflow.jobs.job import Job, run_job from airflow.listeners.listener import get_listener_manager -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils import timezone from airflow.utils.session import provide_session from airflow.utils.state import DagRunState, TaskInstanceState diff --git a/tests/models/test_dag.py b/tests/models/test_dag.py index ab67c3778c262..c79ca24e03a25 100644 --- a/tests/models/test_dag.py +++ b/tests/models/test_dag.py @@ -74,9 +74,9 @@ from airflow.models.serialized_dag import SerializedDagModel from airflow.models.taskfail import TaskFail from airflow.models.taskinstance import TaskInstance as TI -from airflow.operators.bash import BashOperator from airflow.operators.empty import EmptyOperator from airflow.operators.python import PythonOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.security import permissions from airflow.templates import NativeEnvironment, SandboxedEnvironment from airflow.timetables.base import DagRunInfo, DataInterval, TimeRestriction, Timetable diff --git a/tests/models/test_dagrun.py b/tests/models/test_dagrun.py index c7dacaeb291e4..9184f561b3df9 100644 --- a/tests/models/test_dagrun.py +++ b/tests/models/test_dagrun.py @@ -36,9 +36,9 @@ from airflow.models.taskinstance import TaskInstance, TaskInstanceNote, clear_task_instances from airflow.models.taskmap import TaskMap from airflow.models.taskreschedule import TaskReschedule -from airflow.operators.bash import BashOperator from airflow.operators.empty import EmptyOperator from airflow.operators.python import ShortCircuitOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.serialization.serialized_objects import SerializedDAG from airflow.stats import Stats from airflow.triggers.base import StartTriggerArgs diff --git a/tests/models/test_renderedtifields.py b/tests/models/test_renderedtifields.py index b8c45193814aa..1de83954ee4bd 100644 --- a/tests/models/test_renderedtifields.py +++ b/tests/models/test_renderedtifields.py @@ -31,7 +31,7 @@ from airflow.decorators import task as task_decorator from airflow.models import Variable from 
airflow.models.renderedtifields import RenderedTaskInstanceFields as RTIF -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.task_instance_session import set_current_task_instance_session from airflow.utils.timezone import datetime from tests.test_utils.asserts import assert_queries_count diff --git a/tests/models/test_serialized_dag.py b/tests/models/test_serialized_dag.py index d9a77e55edaf5..93845e95832cf 100644 --- a/tests/models/test_serialized_dag.py +++ b/tests/models/test_serialized_dag.py @@ -31,7 +31,7 @@ from airflow.models.dagbag import DagBag from airflow.models.dagcode import DagCode from airflow.models.serialized_dag import SerializedDagModel as SDM -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.serialization.serialized_objects import SerializedDAG from airflow.settings import json from airflow.utils.hashlib_wrapper import md5 diff --git a/tests/models/test_taskinstance.py b/tests/models/test_taskinstance.py index 8c334366f0488..c09d3575d1eca 100644 --- a/tests/models/test_taskinstance.py +++ b/tests/models/test_taskinstance.py @@ -76,9 +76,9 @@ from airflow.models.variable import Variable from airflow.models.xcom import LazyXComSelectSequence, XCom from airflow.notifications.basenotifier import BaseNotifier -from airflow.operators.bash import BashOperator from airflow.operators.empty import EmptyOperator from airflow.operators.python import PythonOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.sensors.base import BaseSensorOperator from airflow.sensors.python import PythonSensor from airflow.serialization.serialized_objects import SerializedBaseOperator, SerializedDAG diff --git a/tests/models/test_xcom_arg.py b/tests/models/test_xcom_arg.py index 6108c5e81930f..fcc2e546009c9 100644 --- a/tests/models/test_xcom_arg.py +++ b/tests/models/test_xcom_arg.py @@ -19,8 +19,8 @@ import pytest from airflow.models.xcom_arg import XComArg -from airflow.operators.bash import BashOperator from airflow.operators.python import PythonOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.types import NOTSET from tests.test_utils.config import conf_vars from tests.test_utils.db import clear_db_dags, clear_db_runs diff --git a/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py b/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py index 4622d31b575fc..12435426dd899 100644 --- a/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py +++ b/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py @@ -30,7 +30,6 @@ from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.models.taskinstancekey import TaskInstanceKey -from airflow.operators.bash import BashOperator from airflow.operators.empty import EmptyOperator from airflow.providers.cncf.kubernetes import pod_generator from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import ( @@ -55,6 +54,7 @@ from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator from airflow.utils import timezone from airflow.utils.state import State, TaskInstanceState +from tests.test_utils.compat import BashOperator from tests.test_utils.config import conf_vars pytestmark = pytest.mark.skip_if_database_isolation_mode diff --git a/tests/providers/cncf/kubernetes/test_template_rendering.py 
b/tests/providers/cncf/kubernetes/test_template_rendering.py index ab2820284d553..4c087d6040e68 100644 --- a/tests/providers/cncf/kubernetes/test_template_rendering.py +++ b/tests/providers/cncf/kubernetes/test_template_rendering.py @@ -24,11 +24,11 @@ from airflow.configuration import TEST_DAGS_FOLDER from airflow.models.renderedtifields import RenderedTaskInstanceFields, RenderedTaskInstanceFields as RTIF -from airflow.operators.bash import BashOperator from airflow.providers.cncf.kubernetes.template_rendering import get_rendered_k8s_spec, render_k8s_pod_yaml from airflow.utils.session import create_session from airflow.version import version from tests.models import DEFAULT_DATE +from tests.test_utils.compat import BashOperator pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/providers/openlineage/extractors/test_bash.py b/tests/providers/openlineage/extractors/test_bash.py index de65a1d176d8a..fc862e5ee30b9 100644 --- a/tests/providers/openlineage/extractors/test_bash.py +++ b/tests/providers/openlineage/extractors/test_bash.py @@ -26,8 +26,8 @@ from airflow import DAG from airflow.exceptions import AirflowProviderDeprecationWarning -from airflow.operators.bash import BashOperator from airflow.providers.openlineage.extractors.bash import BashExtractor +from tests.test_utils.compat import BashOperator pytestmark = pytest.mark.db_test diff --git a/tests/providers/openlineage/extractors/test_python.py b/tests/providers/openlineage/extractors/test_python.py index 81284383d8648..44c5503b712d8 100644 --- a/tests/providers/openlineage/extractors/test_python.py +++ b/tests/providers/openlineage/extractors/test_python.py @@ -28,9 +28,9 @@ from airflow import DAG from airflow.exceptions import AirflowProviderDeprecationWarning -from airflow.operators.bash import BashOperator from airflow.operators.python import PythonOperator from airflow.providers.openlineage.extractors.python import PythonExtractor +from tests.test_utils.compat import BashOperator pytestmark = pytest.mark.db_test diff --git a/tests/providers/openlineage/plugins/test_adapter.py b/tests/providers/openlineage/plugins/test_adapter.py index 260883470875f..b01fe46fdca13 100644 --- a/tests/providers/openlineage/plugins/test_adapter.py +++ b/tests/providers/openlineage/plugins/test_adapter.py @@ -40,7 +40,6 @@ from airflow import DAG from airflow.models.dagrun import DagRun, DagRunState from airflow.models.taskinstance import TaskInstance, TaskInstanceState -from airflow.operators.bash import BashOperator from airflow.operators.empty import EmptyOperator from airflow.providers.openlineage.conf import namespace from airflow.providers.openlineage.extractors import OperatorLineage @@ -52,6 +51,7 @@ ) from airflow.providers.openlineage.utils.utils import get_airflow_job_facet from airflow.utils.task_group import TaskGroup +from tests.test_utils.compat import BashOperator from tests.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/providers/openlineage/plugins/test_facets.py b/tests/providers/openlineage/plugins/test_facets.py index 0ed5b4bf7c440..d46cadc9d69c6 100644 --- a/tests/providers/openlineage/plugins/test_facets.py +++ b/tests/providers/openlineage/plugins/test_facets.py @@ -81,7 +81,7 @@ def test_airflow_dag_run_facet(): }, tasks={ "task_0": { - "operator": "airflow.operators.bash.BashOperator", + "operator": "airflow.providers.standard.operators.bash.BashOperator", "task_group": None, "emits_ol_events": True, "ui_color": "#f0ede4", diff --git 
a/tests/providers/openlineage/plugins/test_utils.py b/tests/providers/openlineage/plugins/test_utils.py index 5335739a8ff92..65874a5ecebf3 100644 --- a/tests/providers/openlineage/plugins/test_utils.py +++ b/tests/providers/openlineage/plugins/test_utils.py @@ -29,7 +29,6 @@ from pkg_resources import parse_version from airflow.models import DAG as AIRFLOW_DAG, DagModel -from airflow.operators.bash import BashOperator from airflow.providers.openlineage.plugins.facets import AirflowDebugRunFacet from airflow.providers.openlineage.utils.utils import ( InfoJsonEncodable, @@ -44,11 +43,15 @@ from airflow.utils import timezone from airflow.utils.log.secrets_masker import _secrets_masker from airflow.utils.state import State -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS, BashOperator if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType +BASH_OPERATOR_PATH = "airflow.providers.standard.operators.bash" +if not AIRFLOW_V_2_10_PLUS: + BASH_OPERATOR_PATH = "airflow.operators.bash" + class SafeStrDict(dict): def __str__(self): @@ -262,7 +265,7 @@ def test_get_fully_qualified_class_name(): from airflow.providers.openlineage.plugins.adapter import OpenLineageAdapter result = get_fully_qualified_class_name(BashOperator(task_id="test", bash_command="exit 0;")) - assert result == "airflow.operators.bash.BashOperator" + assert result == f"{BASH_OPERATOR_PATH}.BashOperator" result = get_fully_qualified_class_name(OpenLineageAdapter()) assert result == "airflow.providers.openlineage.plugins.adapter.OpenLineageAdapter" @@ -278,7 +281,7 @@ def test_is_operator_disabled(mock_disabled_operators): assert is_operator_disabled(op) is False mock_disabled_operators.return_value = { - "airflow.operators.bash.BashOperator", + f"{BASH_OPERATOR_PATH}.BashOperator", "airflow.operators.python.PythonOperator", } assert is_operator_disabled(op) is True @@ -303,8 +306,6 @@ def test_includes_full_task_info(mock_include_full_task_info): @patch("airflow.providers.openlineage.conf.include_full_task_info") def test_does_not_include_full_task_info(mock_include_full_task_info): - from airflow.operators.bash import BashOperator - mock_include_full_task_info.return_value = False # There should be no 'bash_command' in excludes and it's not in includes - so # it's a good choice for checking TaskInfo vs TaskInfoComplete diff --git a/tests/providers/openlineage/utils/test_utils.py b/tests/providers/openlineage/utils/test_utils.py index d97a447e99949..20eba76adeb18 100644 --- a/tests/providers/openlineage/utils/test_utils.py +++ b/tests/providers/openlineage/utils/test_utils.py @@ -27,7 +27,6 @@ from airflow.models.dagrun import DagRun from airflow.models.mappedoperator import MappedOperator from airflow.models.taskinstance import TaskInstance, TaskInstanceState -from airflow.operators.bash import BashOperator from airflow.operators.empty import EmptyOperator from airflow.operators.python import PythonOperator from airflow.providers.openlineage.plugins.facets import AirflowDagRunFacet, AirflowJobFacet @@ -44,8 +43,13 @@ from airflow.serialization.serialized_objects import SerializedBaseOperator from airflow.utils.task_group import TaskGroup from airflow.utils.types import DagRunType +from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS, BashOperator from tests.test_utils.mock_operators import MockOperator +BASH_OPERATOR_PATH = "airflow.providers.standard.operators.bash" +if not AIRFLOW_V_2_10_PLUS: + BASH_OPERATOR_PATH = 
"airflow.operators.bash" + class CustomOperatorForTest(BashOperator): pass @@ -82,7 +86,7 @@ def test_get_airflow_job_facet(): }, tasks={ "task_0": { - "operator": "airflow.operators.bash.BashOperator", + "operator": f"{BASH_OPERATOR_PATH}.BashOperator", "task_group": None, "emits_ol_events": True, "ui_color": "#f0ede4", @@ -166,7 +170,7 @@ def test_get_airflow_dag_run_facet(): def test_get_fully_qualified_class_name_serialized_operator(): - op_module_path = "airflow.operators.bash" + op_module_path = BASH_OPERATOR_PATH op_name = "BashOperator" op = BashOperator(task_id="test", bash_command="echo 1") @@ -191,7 +195,7 @@ def test_get_fully_qualified_class_name_mapped_operator(): def test_get_fully_qualified_class_name_bash_operator(): result = get_fully_qualified_class_name(BashOperator(task_id="test", bash_command="echo 0;")) - expected_result = "airflow.operators.bash.BashOperator" + expected_result = f"{BASH_OPERATOR_PATH}.BashOperator" assert result == expected_result @@ -319,7 +323,7 @@ def sum_values(values: list[int]) -> int: ], }, "task_0": { - "operator": "airflow.operators.bash.BashOperator", + "operator": f"{BASH_OPERATOR_PATH}.BashOperator", "task_group": None, "emits_ol_events": True, "ui_color": BashOperator.ui_color, @@ -360,7 +364,7 @@ def sum_values(values: list[int]) -> int: ], }, "task_3": { - "operator": "airflow.operators.bash.BashOperator", + "operator": f"{BASH_OPERATOR_PATH}.BashOperator", "task_group": None, "emits_ol_events": True, "ui_color": BashOperator.ui_color, @@ -388,7 +392,7 @@ def sum_values(values: list[int]) -> int: ], }, "task_5": { - "operator": "airflow.operators.bash.BashOperator", + "operator": f"{BASH_OPERATOR_PATH}.BashOperator", "task_group": None, "emits_ol_events": True, "ui_color": BashOperator.ui_color, diff --git a/tests/operators/test_bash.py b/tests/providers/standard/operators/test_bash.py similarity index 99% rename from tests/operators/test_bash.py rename to tests/providers/standard/operators/test_bash.py index 8aacb3b7c77ea..2c29a0b96dc93 100644 --- a/tests/operators/test_bash.py +++ b/tests/providers/standard/operators/test_bash.py @@ -28,7 +28,7 @@ import pytest from airflow.exceptions import AirflowException, AirflowSkipException, AirflowTaskTimeout -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils import timezone from airflow.utils.state import State from airflow.utils.types import DagRunType diff --git a/tests/sensors/test_bash.py b/tests/providers/standard/sensors/test_bash.py similarity index 97% rename from tests/sensors/test_bash.py rename to tests/providers/standard/sensors/test_bash.py index 3282f6b971221..d51db033be308 100644 --- a/tests/sensors/test_bash.py +++ b/tests/providers/standard/sensors/test_bash.py @@ -23,7 +23,7 @@ from airflow.exceptions import AirflowFailException, AirflowSensorTimeout from airflow.models.dag import DAG -from airflow.sensors.bash import BashSensor +from airflow.providers.standard.sensors.bash import BashSensor class TestBashSensor: diff --git a/tests/sensors/test_external_task_sensor.py b/tests/sensors/test_external_task_sensor.py index 3d6268834dce2..9947a197a0335 100644 --- a/tests/sensors/test_external_task_sensor.py +++ b/tests/sensors/test_external_task_sensor.py @@ -35,9 +35,9 @@ from airflow.models.dag import DAG from airflow.models.serialized_dag import SerializedDagModel from airflow.models.xcom_arg import XComArg -from airflow.operators.bash import BashOperator from airflow.operators.empty import 
EmptyOperator from airflow.operators.python import PythonOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.providers.standard.sensors.time import TimeSensor from airflow.sensors.external_task import ( ExternalTaskMarker, diff --git a/tests/serialization/test_dag_serialization.py b/tests/serialization/test_dag_serialization.py index d063b3e78035a..f0f517042314a 100644 --- a/tests/serialization/test_dag_serialization.py +++ b/tests/serialization/test_dag_serialization.py @@ -60,11 +60,11 @@ from airflow.models.mappedoperator import MappedOperator from airflow.models.param import Param, ParamsDict from airflow.models.xcom import XCom -from airflow.operators.bash import BashOperator from airflow.operators.empty import EmptyOperator from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator +from airflow.providers.standard.operators.bash import BashOperator +from airflow.providers.standard.sensors.bash import BashSensor from airflow.security import permissions -from airflow.sensors.bash import BashSensor from airflow.serialization.enums import Encoding from airflow.serialization.json_schema import load_dag_schema_dict from airflow.serialization.serialized_objects import ( @@ -154,7 +154,7 @@ "template_fields_renderers": {"bash_command": "bash", "env": "json"}, "bash_command": "echo {{ task.task_id }}", "_task_type": "BashOperator", - "_task_module": "airflow.operators.bash", + "_task_module": "airflow.providers.standard.operators.bash", "pool": "default_pool", "is_setup": False, "is_teardown": False, @@ -2284,7 +2284,7 @@ def test_operator_expand_serde(): "_is_empty": False, "_is_mapped": True, "_needs_expansion": True, - "_task_module": "airflow.operators.bash", + "_task_module": "airflow.providers.standard.operators.bash", "_task_type": "BashOperator", "start_trigger_args": None, "start_from_trigger": False, diff --git a/tests/system/core/example_external_task_child_deferrable.py b/tests/system/core/example_external_task_child_deferrable.py index 9af83b7699ab1..781ad4ea5ef1f 100644 --- a/tests/system/core/example_external_task_child_deferrable.py +++ b/tests/system/core/example_external_task_child_deferrable.py @@ -19,7 +19,7 @@ from datetime import datetime from airflow import DAG -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator with DAG( dag_id="child_dag", diff --git a/tests/system/providers/amazon/aws/example_appflow.py b/tests/system/providers/amazon/aws/example_appflow.py index 0fb2764c0b71c..5ba38533b0211 100644 --- a/tests/system/providers/amazon/aws/example_appflow.py +++ b/tests/system/providers/amazon/aws/example_appflow.py @@ -20,7 +20,6 @@ from airflow.models.baseoperator import chain from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.amazon.aws.operators.appflow import ( AppflowRecordsShortCircuitOperator, AppflowRunAfterOperator, @@ -28,6 +27,7 @@ AppflowRunDailyOperator, AppflowRunFullOperator, ) +from airflow.providers.standard.operators.bash import BashOperator from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() diff --git a/tests/system/providers/amazon/aws/example_http_to_s3.py b/tests/system/providers/amazon/aws/example_http_to_s3.py index 3654140b4a1ed..d6424f9802155 100644 --- a/tests/system/providers/amazon/aws/example_http_to_s3.py +++ b/tests/system/providers/amazon/aws/example_http_to_s3.py @@ -23,9 +23,9 @@ 
from airflow.models.baseoperator import chain from airflow.models.connection import Connection from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.http_to_s3 import HttpToS3Operator +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder diff --git a/tests/system/providers/amazon/aws/utils/k8s.py b/tests/system/providers/amazon/aws/utils/k8s.py index 551d09629e9d3..a882d9e42842e 100644 --- a/tests/system/providers/amazon/aws/utils/k8s.py +++ b/tests/system/providers/amazon/aws/utils/k8s.py @@ -18,7 +18,7 @@ from typing import TYPE_CHECKING -from airflow.operators.bash import BashOperator +from airflow.providers.standard.operators.bash import BashOperator if TYPE_CHECKING: from airflow.models.operator import Operator diff --git a/tests/system/providers/apache/hive/example_twitter_dag.py b/tests/system/providers/apache/hive/example_twitter_dag.py index 53b824f50923e..4ceb119ba551c 100644 --- a/tests/system/providers/apache/hive/example_twitter_dag.py +++ b/tests/system/providers/apache/hive/example_twitter_dag.py @@ -26,8 +26,8 @@ from airflow import DAG from airflow.decorators import task -from airflow.operators.bash import BashOperator from airflow.providers.apache.hive.operators.hive import HiveOperator +from airflow.providers.standard.operators.bash import BashOperator # -------------------------------------------------------------------------------- # Caveat: This Dag will not run because of missing scripts. diff --git a/tests/system/providers/apache/iceberg/example_iceberg.py b/tests/system/providers/apache/iceberg/example_iceberg.py index 0318a8e22b770..41e751624b5c0 100644 --- a/tests/system/providers/apache/iceberg/example_iceberg.py +++ b/tests/system/providers/apache/iceberg/example_iceberg.py @@ -19,8 +19,8 @@ from datetime import datetime, timedelta from airflow import DAG -from airflow.operators.bash import BashOperator from airflow.providers.apache.iceberg.hooks.iceberg import IcebergHook +from airflow.providers.standard.operators.bash import BashOperator bash_command = f""" echo "Our token: {IcebergHook().get_token_macro()}" diff --git a/tests/system/providers/cncf/kubernetes/example_kubernetes.py b/tests/system/providers/cncf/kubernetes/example_kubernetes.py index 57bab063a9e62..3756d0c4e21d8 100644 --- a/tests/system/providers/cncf/kubernetes/example_kubernetes.py +++ b/tests/system/providers/cncf/kubernetes/example_kubernetes.py @@ -27,9 +27,9 @@ from kubernetes.client import models as k8s from airflow import DAG -from airflow.operators.bash import BashOperator from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator from airflow.providers.cncf.kubernetes.secret import Secret +from airflow.providers.standard.operators.bash import BashOperator # [START howto_operator_k8s_cluster_resources] secret_file = Secret("volume", "/etc/sql_conn", "airflow-secrets", "sql_alchemy_conn") diff --git a/tests/system/providers/cncf/kubernetes/example_kubernetes_async.py b/tests/system/providers/cncf/kubernetes/example_kubernetes_async.py index 881bfd61f7c8d..cb3d25a33fcbc 100644 --- a/tests/system/providers/cncf/kubernetes/example_kubernetes_async.py +++ b/tests/system/providers/cncf/kubernetes/example_kubernetes_async.py @@ -27,9 +27,9 @@ from 
kubernetes.client import models as k8s from airflow import DAG -from airflow.operators.bash import BashOperator from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator from airflow.providers.cncf.kubernetes.secret import Secret +from airflow.providers.standard.operators.bash import BashOperator # [START howto_operator_k8s_cluster_resources] secret_file = Secret("volume", "/etc/sql_conn", "airflow-secrets", "sql_alchemy_conn") diff --git a/tests/system/providers/docker/example_docker.py b/tests/system/providers/docker/example_docker.py index 069f4794de632..18f7d2f0ea0c6 100644 --- a/tests/system/providers/docker/example_docker.py +++ b/tests/system/providers/docker/example_docker.py @@ -21,8 +21,8 @@ from datetime import datetime from airflow import models -from airflow.operators.bash import BashOperator from airflow.providers.docker.operators.docker import DockerOperator +from airflow.providers.standard.operators.bash import BashOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID") DAG_ID = "docker_test" diff --git a/tests/system/providers/docker/example_docker_copy_data.py b/tests/system/providers/docker/example_docker_copy_data.py index 50373af792515..4e4e8466e501f 100644 --- a/tests/system/providers/docker/example_docker_copy_data.py +++ b/tests/system/providers/docker/example_docker_copy_data.py @@ -32,9 +32,9 @@ from docker.types import Mount from airflow import models -from airflow.operators.bash import BashOperator from airflow.operators.python import ShortCircuitOperator from airflow.providers.docker.operators.docker import DockerOperator +from airflow.providers.standard.operators.bash import BashOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID") DAG_ID = "docker_sample_copy_data" diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py index bd74e49d40964..004f996975be1 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py +++ b/tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py @@ -25,13 +25,13 @@ from datetime import datetime from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.bigquery import ( BigQueryCreateEmptyDatasetOperator, BigQueryDeleteDatasetOperator, BigQueryGetDatasetOperator, BigQueryUpdateDatasetOperator, ) +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py index 6878a9822de1e..ab7a4b3757b9b 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py +++ b/tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py @@ -25,7 +25,6 @@ from datetime import datetime from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.bigquery import ( BigQueryCheckOperator, BigQueryColumnCheckOperator, @@ -38,6 +37,7 @@ BigQueryTableCheckOperator, BigQueryValueCheckOperator, ) +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git 
a/tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py index 2f1ed573aa053..a007e1cd639c0 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py +++ b/tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py @@ -25,7 +25,6 @@ from datetime import datetime, timedelta from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.bigquery import ( BigQueryCheckOperator, BigQueryCreateEmptyDatasetOperator, @@ -36,6 +35,7 @@ BigQueryIntervalCheckOperator, BigQueryValueCheckOperator, ) +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_mssql.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_to_mssql.py index 0f7acd8a14d36..e9b3269ecfb6c 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_mssql.py +++ b/tests/system/providers/google/cloud/bigquery/example_bigquery_to_mssql.py @@ -36,7 +36,6 @@ from airflow.decorators import task from airflow.models import Connection from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator from airflow.providers.google.cloud.hooks.compute import ComputeEngineHook from airflow.providers.google.cloud.hooks.compute_ssh import ComputeEngineSSHHook @@ -51,6 +50,7 @@ ComputeEngineInsertInstanceOperator, ) from airflow.providers.ssh.operators.ssh import SSHOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.settings import Session from airflow.utils.trigger_rule import TriggerRule diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_postgres.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_to_postgres.py index 990820bfe28c6..4a3b0386da0f4 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_postgres.py +++ b/tests/system/providers/google/cloud/bigquery/example_bigquery_to_postgres.py @@ -35,7 +35,6 @@ from airflow.decorators import task from airflow.models import Connection from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator from airflow.providers.google.cloud.hooks.compute import ComputeEngineHook from airflow.providers.google.cloud.hooks.compute_ssh import ComputeEngineSSHHook @@ -50,6 +49,7 @@ ) from airflow.providers.google.cloud.transfers.bigquery_to_postgres import BigQueryToPostgresOperator from airflow.providers.ssh.operators.ssh import SSHOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.settings import Session from airflow.utils.trigger_rule import TriggerRule diff --git a/tests/system/providers/google/cloud/cloud_build/example_cloud_build.py b/tests/system/providers/google/cloud/cloud_build/example_cloud_build.py index 42cfbc8808f81..cb31a3b4d091d 100644 --- a/tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +++ b/tests/system/providers/google/cloud/cloud_build/example_cloud_build.py @@ -31,7 +31,6 @@ from airflow.decorators import task_group from airflow.models.dag import DAG from airflow.models.xcom_arg 
import XComArg -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.cloud_build import ( CloudBuildCancelBuildOperator, CloudBuildCreateBuildOperator, @@ -39,6 +38,7 @@ CloudBuildListBuildsOperator, CloudBuildRetryBuildOperator, ) +from airflow.providers.standard.operators.bash import BashOperator from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py b/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py index dce93182090d0..4884122751e09 100644 --- a/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py +++ b/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py @@ -32,7 +32,6 @@ from google.protobuf.field_mask_pb2 import FieldMask from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.cloud_memorystore import ( CloudMemorystoreMemcachedApplyParametersOperator, CloudMemorystoreMemcachedCreateInstanceOperator, @@ -42,6 +41,7 @@ CloudMemorystoreMemcachedUpdateInstanceOperator, CloudMemorystoreMemcachedUpdateParametersOperator, ) +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py b/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py index aeee437ef9b57..c46d966371dac 100644 --- a/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +++ b/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py @@ -27,7 +27,6 @@ from google.cloud.redis_v1 import FailoverInstanceRequest, Instance from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.cloud_memorystore import ( CloudMemorystoreCreateInstanceAndImportOperator, CloudMemorystoreCreateInstanceOperator, @@ -46,6 +45,7 @@ GCSCreateBucketOperator, GCSDeleteBucketOperator, ) +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_copy_delete.py b/tests/system/providers/google/cloud/gcs/example_gcs_copy_delete.py index 7d9046088a740..aebb1e3e7ed85 100644 --- a/tests/system/providers/google/cloud/gcs/example_gcs_copy_delete.py +++ b/tests/system/providers/google/cloud/gcs/example_gcs_copy_delete.py @@ -27,7 +27,6 @@ from airflow.models.baseoperator import chain from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.gcs import ( GCSCreateBucketOperator, GCSDeleteBucketOperator, @@ -35,6 +34,7 @@ GCSListObjectsOperator, ) from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py 
b/tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py index 9e92102fa924c..55bec85a50562 100644 --- a/tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py +++ b/tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py @@ -29,7 +29,6 @@ from airflow.decorators import task from airflow.models.baseoperator import chain from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.operators.python import PythonOperator from airflow.providers.google.cloud.operators.gcs import ( GCSCreateBucketOperator, @@ -39,6 +38,7 @@ ) from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py b/tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py index 56fb0811b0621..a673ab88f722e 100644 --- a/tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py +++ b/tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py @@ -34,7 +34,6 @@ from airflow.decorators import task from airflow.models import Connection from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator from airflow.providers.google.cloud.hooks.compute import ComputeEngineHook from airflow.providers.google.cloud.hooks.compute_ssh import ComputeEngineSSHHook @@ -47,6 +46,7 @@ GCSDeleteBucketOperator, ) from airflow.providers.ssh.operators.ssh import SSHOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.settings import Session from airflow.utils.trigger_rule import TriggerRule diff --git a/tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py b/tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py index 31951cd023010..2860d8552e101 100644 --- a/tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py +++ b/tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py @@ -27,9 +27,9 @@ from airflow.models.baseoperator import chain from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.sftp_to_gcs import SFTPToGCSOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/tests/system/providers/google/cloud/gcs/example_sheets.py b/tests/system/providers/google/cloud/gcs/example_sheets.py index 958bd90360285..2247819494f91 100644 --- a/tests/system/providers/google/cloud/gcs/example_sheets.py +++ b/tests/system/providers/google/cloud/gcs/example_sheets.py @@ -26,11 +26,11 @@ from airflow.models import Connection from airflow.models.dag import DAG from airflow.models.xcom_arg import XComArg -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.sheets_to_gcs import GoogleSheetsToGCSOperator from airflow.providers.google.suite.operators.sheets import 
GoogleSheetsCreateSpreadsheetOperator from airflow.providers.google.suite.transfers.gcs_to_sheets import GCSToGoogleSheetsOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.settings import Session from airflow.utils.trigger_rule import TriggerRule diff --git a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py b/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py index 031f8326ee99c..173fddad3a065 100644 --- a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py +++ b/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py @@ -25,12 +25,12 @@ from datetime import datetime from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.kubernetes_engine import ( GKECreateClusterOperator, GKEDeleteClusterOperator, GKEStartPodOperator, ) +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py b/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py index 5e3f4ddbf7044..e974a628c7a52 100644 --- a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py +++ b/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py @@ -25,12 +25,12 @@ from datetime import datetime from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.kubernetes_engine import ( GKECreateClusterOperator, GKEDeleteClusterOperator, GKEStartPodOperator, ) +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/tests/system/providers/google/cloud/ml_engine/example_mlengine.py b/tests/system/providers/google/cloud/ml_engine/example_mlengine.py index 87602da88c46c..bde2c0bbaf9ee 100644 --- a/tests/system/providers/google/cloud/ml_engine/example_mlengine.py +++ b/tests/system/providers/google/cloud/ml_engine/example_mlengine.py @@ -29,7 +29,6 @@ from google.protobuf.struct_pb2 import Value from airflow import models -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.gcs import ( GCSCreateBucketOperator, GCSDeleteBucketOperator, @@ -53,6 +52,7 @@ ListModelVersionsOperator, SetDefaultVersionOnModelOperator, ) +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default") diff --git a/tests/system/providers/google/cloud/natural_language/example_natural_language.py b/tests/system/providers/google/cloud/natural_language/example_natural_language.py index 5bc38f9220874..e04fdf4fb601b 100644 --- a/tests/system/providers/google/cloud/natural_language/example_natural_language.py +++ b/tests/system/providers/google/cloud/natural_language/example_natural_language.py @@ -27,13 +27,13 @@ from google.cloud.language_v1 import Document from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.natural_language import ( 
CloudNaturalLanguageAnalyzeEntitiesOperator, CloudNaturalLanguageAnalyzeEntitySentimentOperator, CloudNaturalLanguageAnalyzeSentimentOperator, CloudNaturalLanguageClassifyTextOperator, ) +from airflow.providers.standard.operators.bash import BashOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "gcp_natural_language" diff --git a/tests/system/providers/google/cloud/pubsub/example_pubsub.py b/tests/system/providers/google/cloud/pubsub/example_pubsub.py index 29ba6469ea0ce..4ff3091e5fa53 100644 --- a/tests/system/providers/google/cloud/pubsub/example_pubsub.py +++ b/tests/system/providers/google/cloud/pubsub/example_pubsub.py @@ -25,7 +25,6 @@ from datetime import datetime from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.pubsub import ( PubSubCreateSubscriptionOperator, PubSubCreateTopicOperator, @@ -35,6 +34,7 @@ PubSubPullOperator, ) from airflow.providers.google.cloud.sensors.pubsub import PubSubPullSensor +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/tests/system/providers/google/cloud/sql_to_sheets/example_sql_to_sheets.py b/tests/system/providers/google/cloud/sql_to_sheets/example_sql_to_sheets.py index cf256c8f9fa2f..11231c0dfd40c 100644 --- a/tests/system/providers/google/cloud/sql_to_sheets/example_sql_to_sheets.py +++ b/tests/system/providers/google/cloud/sql_to_sheets/example_sql_to_sheets.py @@ -34,7 +34,6 @@ from airflow.decorators import task from airflow.models import Connection from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator from airflow.providers.google.cloud.hooks.compute import ComputeEngineHook from airflow.providers.google.cloud.hooks.compute_ssh import ComputeEngineSSHHook @@ -45,6 +44,7 @@ from airflow.providers.google.suite.operators.sheets import GoogleSheetsCreateSpreadsheetOperator from airflow.providers.google.suite.transfers.sql_to_sheets import SQLToGoogleSheetsOperator from airflow.providers.ssh.operators.ssh import SSHOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.settings import Session, json from airflow.utils.trigger_rule import TriggerRule diff --git a/tests/system/providers/google/cloud/tasks/example_queue.py b/tests/system/providers/google/cloud/tasks/example_queue.py index 53919fb146da6..4c29b584f5bfc 100644 --- a/tests/system/providers/google/cloud/tasks/example_queue.py +++ b/tests/system/providers/google/cloud/tasks/example_queue.py @@ -35,7 +35,6 @@ from airflow.decorators import task from airflow.models.baseoperator import chain from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.tasks import ( CloudTasksQueueCreateOperator, CloudTasksQueueDeleteOperator, @@ -46,6 +45,7 @@ CloudTasksQueuesListOperator, CloudTasksQueueUpdateOperator, ) +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/tests/system/providers/google/cloud/transfers/example_postgres_to_gcs.py b/tests/system/providers/google/cloud/transfers/example_postgres_to_gcs.py index 8394c99fcef51..33a289c1ffa1c 100644 --- 
a/tests/system/providers/google/cloud/transfers/example_postgres_to_gcs.py +++ b/tests/system/providers/google/cloud/transfers/example_postgres_to_gcs.py @@ -33,7 +33,6 @@ from airflow.decorators import task from airflow.models import Connection from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator from airflow.providers.google.cloud.hooks.compute import ComputeEngineHook from airflow.providers.google.cloud.hooks.compute_ssh import ComputeEngineSSHHook @@ -47,6 +46,7 @@ ) from airflow.providers.google.cloud.transfers.postgres_to_gcs import PostgresToGCSOperator from airflow.providers.ssh.operators.ssh import SSHOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.settings import Session from airflow.utils.trigger_rule import TriggerRule diff --git a/tests/system/providers/google/cloud/translate/example_translate.py b/tests/system/providers/google/cloud/translate/example_translate.py index 87f424673ecff..b593060f6e5bb 100644 --- a/tests/system/providers/google/cloud/translate/example_translate.py +++ b/tests/system/providers/google/cloud/translate/example_translate.py @@ -25,8 +25,8 @@ from datetime import datetime from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.translate import CloudTranslateTextOperator +from airflow.providers.standard.operators.bash import BashOperator DAG_ID = "gcp_translate" diff --git a/tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py b/tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py index eae6a54a89c3c..499db2d6427ba 100644 --- a/tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py +++ b/tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py @@ -33,7 +33,6 @@ from airflow.models.baseoperator import chain from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.operators.video_intelligence import ( CloudVideoIntelligenceDetectVideoExplicitContentOperator, @@ -41,6 +40,7 @@ CloudVideoIntelligenceDetectVideoShotsOperator, ) from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") diff --git a/tests/system/providers/google/cloud/vision/example_vision_annotate_image.py b/tests/system/providers/google/cloud/vision/example_vision_annotate_image.py index 1d6167c6866ee..2a4d7b75f1332 100644 --- a/tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +++ b/tests/system/providers/google/cloud/vision/example_vision_annotate_image.py @@ -22,7 +22,6 @@ from airflow.models.baseoperator import chain from airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.operators.vision import ( CloudVisionDetectImageLabelsOperator, @@ -32,6 +31,7 @@ CloudVisionTextDetectOperator, ) from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator +from 
airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule # [START howto_operator_vision_retry_import] diff --git a/tests/system/providers/google/datacatalog/example_datacatalog_entries.py b/tests/system/providers/google/datacatalog/example_datacatalog_entries.py index db7d74b18d985..47edfb96368f1 100644 --- a/tests/system/providers/google/datacatalog/example_datacatalog_entries.py +++ b/tests/system/providers/google/datacatalog/example_datacatalog_entries.py @@ -24,7 +24,6 @@ from airflow.models.dag import DAG from airflow.models.xcom_arg import XComArg -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.datacatalog import ( CloudDataCatalogCreateEntryGroupOperator, CloudDataCatalogCreateEntryOperator, @@ -36,6 +35,7 @@ CloudDataCatalogUpdateEntryOperator, ) from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/tests/system/providers/google/datacatalog/example_datacatalog_search_catalog.py b/tests/system/providers/google/datacatalog/example_datacatalog_search_catalog.py index 8061ecaf110ae..781d047c53469 100644 --- a/tests/system/providers/google/datacatalog/example_datacatalog_search_catalog.py +++ b/tests/system/providers/google/datacatalog/example_datacatalog_search_catalog.py @@ -25,7 +25,6 @@ from airflow.models.dag import DAG from airflow.models.xcom_arg import XComArg -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.datacatalog import ( CloudDataCatalogCreateEntryGroupOperator, CloudDataCatalogCreateEntryOperator, @@ -38,6 +37,7 @@ CloudDataCatalogSearchCatalogOperator, ) from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py b/tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py index 6c1fa6f0cab45..b8dd9170c3c02 100644 --- a/tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +++ b/tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py @@ -24,7 +24,6 @@ from airflow.models.dag import DAG from airflow.models.xcom_arg import XComArg -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.datacatalog import ( CloudDataCatalogCreateTagTemplateFieldOperator, CloudDataCatalogCreateTagTemplateOperator, @@ -35,6 +34,7 @@ CloudDataCatalogUpdateTagTemplateFieldOperator, CloudDataCatalogUpdateTagTemplateOperator, ) +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/tests/system/providers/google/datacatalog/example_datacatalog_tags.py b/tests/system/providers/google/datacatalog/example_datacatalog_tags.py index 28764c3e0336d..17397fcea2806 100644 --- a/tests/system/providers/google/datacatalog/example_datacatalog_tags.py +++ 
b/tests/system/providers/google/datacatalog/example_datacatalog_tags.py @@ -25,7 +25,6 @@ from airflow.models.dag import DAG from airflow.models.xcom_arg import XComArg -from airflow.operators.bash import BashOperator from airflow.providers.google.cloud.operators.datacatalog import ( CloudDataCatalogCreateEntryGroupOperator, CloudDataCatalogCreateEntryOperator, @@ -39,6 +38,7 @@ CloudDataCatalogUpdateTagOperator, ) from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator +from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID diff --git a/tests/system/providers/opsgenie/example_opsgenie_notifier.py b/tests/system/providers/opsgenie/example_opsgenie_notifier.py index a9cdd70de0125..10edf8debdaed 100644 --- a/tests/system/providers/opsgenie/example_opsgenie_notifier.py +++ b/tests/system/providers/opsgenie/example_opsgenie_notifier.py @@ -21,8 +21,8 @@ from datetime import datetime from airflow import DAG -from airflow.operators.bash import BashOperator from airflow.providers.opsgenie.notifications.opsgenie import send_opsgenie_notification +from airflow.providers.standard.operators.bash import BashOperator with DAG( "opsgenie_notifier", diff --git a/tests/system/providers/singularity/example_singularity.py b/tests/system/providers/singularity/example_singularity.py index d802fbb31e820..4b60c080dcd2c 100644 --- a/tests/system/providers/singularity/example_singularity.py +++ b/tests/system/providers/singularity/example_singularity.py @@ -21,8 +21,8 @@ from datetime import datetime, timedelta from airflow import DAG -from airflow.operators.bash import BashOperator from airflow.providers.singularity.operators.singularity import SingularityOperator +from airflow.providers.standard.operators.bash import BashOperator ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID") DAG_ID = "singularity_sample" diff --git a/tests/test_utils/compat.py b/tests/test_utils/compat.py index 09f3653db82d8..67a111350daff 100644 --- a/tests/test_utils/compat.py +++ b/tests/test_utils/compat.py @@ -18,8 +18,6 @@ import contextlib import json -import os -from importlib.metadata import version from typing import TYPE_CHECKING, Any, cast from packaging.version import Version @@ -38,7 +36,6 @@ except ImportError: from airflow.models.errors import ImportError as ParseImportError # type: ignore[no-redef,attr-defined] - from airflow import __version__ as airflow_version AIRFLOW_VERSION = Version(airflow_version) @@ -53,6 +50,16 @@ # Compatibility for Airflow 2.7.* from airflow.models.baseoperator import BaseOperatorLink +try: + from airflow.providers.standard.operators.bash import BashOperator + from airflow.providers.standard.sensors.bash import BashSensor + from airflow.providers.standard.sensors.date_time import DateTimeSensor +except ImportError: + # Compatibility for Airflow < 2.10.* + from airflow.operators.bash import BashOperator # type: ignore[no-redef,attr-defined] + from airflow.sensors.bash import BashSensor # type: ignore[no-redef,attr-defined] + from airflow.sensors.date_time import DateTimeSensor # type: ignore[no-redef,attr-defined] + if TYPE_CHECKING: from airflow.models.asset import ( diff --git a/tests/utils/test_dot_renderer.py b/tests/utils/test_dot_renderer.py index 5cb52696f19ce..0376848fce829 100644 --- a/tests/utils/test_dot_renderer.py +++ b/tests/utils/test_dot_renderer.py @@ -23,13 +23,13 @@ import pytest from 
airflow.models.dag import DAG -from airflow.operators.bash import BashOperator from airflow.operators.empty import EmptyOperator from airflow.operators.python import PythonOperator from airflow.serialization.dag_dependency import DagDependency from airflow.utils import dot_renderer, timezone from airflow.utils.state import State from airflow.utils.task_group import TaskGroup +from tests.test_utils.compat import BashOperator from tests.test_utils.db import clear_db_dags START_DATE = timezone.utcnow() diff --git a/tests/utils/test_task_group.py b/tests/utils/test_task_group.py index 084d8c35ac03e..a6008dc58c03f 100644 --- a/tests/utils/test_task_group.py +++ b/tests/utils/test_task_group.py @@ -34,12 +34,12 @@ from airflow.models.baseoperator import BaseOperator from airflow.models.dag import DAG from airflow.models.xcom_arg import XComArg -from airflow.operators.bash import BashOperator from airflow.operators.empty import EmptyOperator from airflow.operators.python import PythonOperator from airflow.utils.dag_edges import dag_edges from airflow.utils.task_group import TASKGROUP_ARGS_EXPECTED_TYPES, TaskGroup, task_group_to_dict from tests.models import DEFAULT_DATE +from tests.test_utils.compat import BashOperator def make_task(name, type_="classic"): diff --git a/tests/www/views/test_views_rendered.py b/tests/www/views/test_views_rendered.py index f3947b141a347..2d1754af29f65 100644 --- a/tests/www/views/test_views_rendered.py +++ b/tests/www/views/test_views_rendered.py @@ -28,7 +28,6 @@ from airflow.models.dag import DAG from airflow.models.renderedtifields import RenderedTaskInstanceFields from airflow.models.variable import Variable -from airflow.operators.bash import BashOperator from airflow.operators.python import PythonOperator from airflow.serialization.serialized_objects import SerializedDAG from airflow.utils import timezone @@ -36,7 +35,7 @@ from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.types import DagRunType from tests.conftest import initial_db_init -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS, BashOperator from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_rendered_ti_fields from tests.test_utils.www import check_content_in_response, check_content_not_in_response diff --git a/tests/www/views/test_views_tasks.py b/tests/www/views/test_views_tasks.py index 7b65051724c27..4dcb7252a3658 100644 --- a/tests/www/views/test_views_tasks.py +++ b/tests/www/views/test_views_tasks.py @@ -34,7 +34,6 @@ from airflow.models.taskinstance import TaskInstance from airflow.models.taskreschedule import TaskReschedule from airflow.models.xcom import XCom -from airflow.operators.bash import BashOperator from airflow.operators.empty import EmptyOperator from airflow.providers.celery.executors.celery_executor import CeleryExecutor from airflow.security import permissions @@ -49,7 +48,7 @@ delete_roles, delete_user, ) -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS, BashOperator from tests.test_utils.config import conf_vars from tests.test_utils.db import clear_db_runs, clear_db_xcom from tests.test_utils.www import check_content_in_response, check_content_not_in_response, client_with_login From 00d6ae72e2c712a77a9ee2ac262fcecf414ddff0 Mon Sep 17 00:00:00 2001 From: rom sharon <33751805+romsharon98@users.noreply.github.com> Date: Wed, 9 Oct 2024 15:31:17 +0300 Subject: [PATCH 037/125] Fix mark as success when pod 
fails while fetching log (#42815) * fix: always defer once more after log fetching to ensure pod completion is handled * add tests * removing print --------- Co-authored-by: Jean-Eudes Peloye --- airflow/providers/cncf/kubernetes/operators/pod.py | 4 +--- .../providers/cncf/kubernetes/operators/test_pod.py | 12 ++++++++++++ 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/airflow/providers/cncf/kubernetes/operators/pod.py b/airflow/providers/cncf/kubernetes/operators/pod.py index 5b9e57ec01743..68081b5a67075 100644 --- a/airflow/providers/cncf/kubernetes/operators/pod.py +++ b/airflow/providers/cncf/kubernetes/operators/pod.py @@ -790,9 +790,7 @@ def trigger_reentry(self, context: Context, event: dict[str, Any]) -> Any: since_time=last_log_time, ) - if pod_log_status.running: - self.log.info("Container still running; deferring again.") - self.invoke_defer_method(pod_log_status.last_log_time) + self.invoke_defer_method(pod_log_status.last_log_time) else: self.invoke_defer_method() diff --git a/tests/providers/cncf/kubernetes/operators/test_pod.py b/tests/providers/cncf/kubernetes/operators/test_pod.py index be8279bcabd9a..a4ccb4b44b4bf 100644 --- a/tests/providers/cncf/kubernetes/operators/test_pod.py +++ b/tests/providers/cncf/kubernetes/operators/test_pod.py @@ -1780,6 +1780,18 @@ def test_process_duplicate_label_pods__pod_removed_if_delete_pod( process_pod_deletion_mock.assert_called_once_with(pod_1) assert result.metadata.name == pod_2.metadata.name + @patch(POD_MANAGER_CLASS.format("fetch_container_logs")) + @patch(KUB_OP_PATH.format("invoke_defer_method")) + def test_defere_call_one_more_time_after_error(self, invoke_defer_method, fetch_container_logs): + fetch_container_logs.return_value = PodLoggingStatus(False, None) + op = KubernetesPodOperator(task_id="test_task", name="test-pod", get_logs=True) + + op.trigger_reentry( + create_context(op), event={"name": TEST_NAME, "namespace": TEST_NAMESPACE, "status": "running"} + ) + + invoke_defer_method.assert_called_with(None) + class TestSuppress: def test__suppress(self, caplog): From c2486b7c090b1175d3f5c276214bee4a791b8837 Mon Sep 17 00:00:00 2001 From: John Bampton Date: Thu, 10 Oct 2024 00:00:11 +1000 Subject: [PATCH 038/125] Fix spelling; `Airlfow` -> `Airflow` (#42855) --- airflow/providers/elasticsearch/CHANGELOG.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/airflow/providers/elasticsearch/CHANGELOG.rst b/airflow/providers/elasticsearch/CHANGELOG.rst index d54ea95a1185b..2e51c8572b227 100644 --- a/airflow/providers/elasticsearch/CHANGELOG.rst +++ b/airflow/providers/elasticsearch/CHANGELOG.rst @@ -502,7 +502,7 @@ Bug Fixes 3.0.1 (YANKED) .............. -.. warning:: This release has been **yanked** with a reason: ``Elasticsearch provider is incompatible with Airlfow <2.3`` +.. warning:: This release has been **yanked** with a reason: ``Elasticsearch provider is incompatible with Airflow <2.3`` Misc ~~~~~ @@ -512,7 +512,7 @@ Misc 3.0.0 (YANKED) .............. -.. warning:: This release has been **yanked** with a reason: ``Elasticsearch provider is incompatible with Airlfow <2.3`` +.. 
warning:: This release has been **yanked** with a reason: ``Elasticsearch provider is incompatible with Airflow <2.3`` Breaking changes ~~~~~~~~~~~~~~~~ From 51c35700860e8cf102e091a1a44d72efe4170692 Mon Sep 17 00:00:00 2001 From: Kalyan R Date: Wed, 9 Oct 2024 20:02:39 +0530 Subject: [PATCH 039/125] Docs: Add Template field related info for python operators (#42847) closes #39584 This PR adds templating information related to Python operators in How-to Guide --- docs/apache-airflow/howto/operator/python.rst | 40 +++++++++++++------ 1 file changed, 28 insertions(+), 12 deletions(-) diff --git a/docs/apache-airflow/howto/operator/python.rst b/docs/apache-airflow/howto/operator/python.rst index 2f0defddd886c..e68a257bcd67e 100644 --- a/docs/apache-airflow/howto/operator/python.rst +++ b/docs/apache-airflow/howto/operator/python.rst @@ -79,7 +79,7 @@ Airflow passes in an additional set of keyword arguments: one for each of the :ref:`Jinja template variables ` and a ``templates_dict`` argument. -The ``templates_dict`` argument is templated, so each value in the dictionary +``templates_dict``, ``op_args``, ``op_kwargs`` arguments are templated, so each value in the dictionary is evaluated as a :ref:`Jinja template `. .. tab-set:: @@ -182,6 +182,7 @@ Otherwise you won't have access to the most context variables of Airflow in ``op If you want the context related to datetime objects like ``data_interval_start`` you can add ``pendulum`` and ``lazy_object_proxy``. + .. important:: The Python function body defined to be executed is cut out of the DAG into a temporary file w/o surrounding code. As in the examples you need to add all imports again and you can not rely on variables from the global Python context. @@ -199,6 +200,11 @@ If additional parameters for package installation are needed pass them in via th All supported options are listed in the `requirements file format `_. +Templating +^^^^^^^^^^ + +Jinja templating can be used in same way as described for the :ref:`howto/operator:PythonOperator`. + Virtual environment setup options ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -336,6 +342,11 @@ If you want the context related to datetime objects like ``data_interval_start`` If you want to pass variables into the classic :class:`~airflow.operators.python.ExternalPythonOperator` use ``op_args`` and ``op_kwargs``. +Templating +^^^^^^^^^^ + +Jinja templating can be used in same way as described for the :ref:`howto/operator:PythonOperator`. + Context ^^^^^^^ @@ -393,7 +404,10 @@ tasks. :start-after: [START howto_operator_branch_python] :end-before: [END howto_operator_branch_python] -Argument passing and templating options are the same like with :ref:`howto/operator:PythonOperator`. +Passing in arguments and Templating +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Argument passing and templating options are the same as with :ref:`howto/operator:PythonOperator`. .. _howto/operator:BranchPythonVirtualenvOperator: @@ -427,7 +441,10 @@ tasks and is a hybrid of the :class:`~airflow.operators.python.PythonBranchOpera :start-after: [START howto_operator_branch_virtualenv] :end-before: [END howto_operator_branch_virtualenv] -Argument passing and templating options are the same like with :ref:`howto/operator:PythonVirtualenvOperator`. +Passing in arguments and Templating +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Argument passing and templating options are the same as with :ref:`howto/operator:PythonOperator`. .. _howto/operator:BranchExternalPythonOperator: @@ -462,7 +479,11 @@ external Python environment. 
:start-after: [START howto_operator_branch_ext_py] :end-before: [END howto_operator_branch_ext_py] -Argument passing and templating options are the same like with :ref:`howto/operator:ExternalPythonOperator`. + +Passing in arguments and Templating +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Argument passing and templating options are the same as with :ref:`howto/operator:PythonOperator`. .. _howto/operator:ShortCircuitOperator: @@ -538,16 +559,11 @@ tasks have completed running regardless of status (i.e. the ``TriggerRule.ALL_DO :start-after: [START howto_operator_short_circuit_trigger_rules] :end-before: [END howto_operator_short_circuit_trigger_rules] -Passing in arguments -^^^^^^^^^^^^^^^^^^^^ -Pass extra arguments to the ``@task.short_circuit``-decorated function as you would with a normal Python function. - - -Templating -^^^^^^^^^^ +Passing in arguments and Templating +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Jinja templating can be used in same way as described for the PythonOperator. +Argument passing and templating options are the same as with :ref:`howto/operator:PythonOperator`. .. _howto/operator:PythonSensor: From e2f265370ddc73a38b1cf18ee66aa753b49c83ed Mon Sep 17 00:00:00 2001 From: Pierre Jeambrun Date: Wed, 9 Oct 2024 23:03:55 +0800 Subject: [PATCH 040/125] AIP-84 Get Variable (#42834) Handle redacted value for Variables --- .../endpoints/variable_endpoint.py | 1 + airflow/api_fastapi/openapi/v1-generated.yaml | 66 +++++++++++++++ airflow/api_fastapi/serializers/variables.py | 49 ++++++++++++ airflow/api_fastapi/views/public/variables.py | 16 ++++ airflow/ui/openapi-gen/queries/common.ts | 16 ++++ airflow/ui/openapi-gen/queries/prefetch.ts | 21 +++++ airflow/ui/openapi-gen/queries/queries.ts | 29 +++++++ airflow/ui/openapi-gen/queries/suspense.ts | 30 +++++++ .../ui/openapi-gen/requests/schemas.gen.ts | 35 ++++++++ .../ui/openapi-gen/requests/services.gen.ts | 28 +++++++ airflow/ui/openapi-gen/requests/types.gen.ts | 40 ++++++++++ .../views/public/test_variables.py | 80 +++++++++++++++++-- 12 files changed, 406 insertions(+), 5 deletions(-) create mode 100644 airflow/api_fastapi/serializers/variables.py diff --git a/airflow/api_connexion/endpoints/variable_endpoint.py b/airflow/api_connexion/endpoints/variable_endpoint.py index 9413f9158652d..8efddb58419c4 100644 --- a/airflow/api_connexion/endpoints/variable_endpoint.py +++ b/airflow/api_connexion/endpoints/variable_endpoint.py @@ -59,6 +59,7 @@ def delete_variable(*, variable_key: str) -> Response: return Response(status=HTTPStatus.NO_CONTENT) +@mark_fastapi_migration_done @security.requires_access_variable("GET") @provide_session def get_variable(*, variable_key: str, session: Session = NEW_SESSION) -> Response: diff --git a/airflow/api_fastapi/openapi/v1-generated.yaml b/airflow/api_fastapi/openapi/v1-generated.yaml index 28723b800879e..463cc1e92f4d6 100644 --- a/airflow/api_fastapi/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/openapi/v1-generated.yaml @@ -535,6 +535,50 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + get: + tags: + - Variable + summary: Get Variable + description: Get a variable entry. 
+ operationId: get_variable + parameters: + - name: variable_key + in: path + required: true + schema: + type: string + title: Variable Key + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/VariableResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' components: schemas: ConnectionResponse: @@ -1193,3 +1237,25 @@ components: - msg - type title: ValidationError + VariableResponse: + properties: + key: + type: string + title: Key + value: + anyOf: + - type: string + - type: 'null' + title: Value + description: + anyOf: + - type: string + - type: 'null' + title: Description + type: object + required: + - key + - value + - description + title: VariableResponse + description: Variable serializer for responses. diff --git a/airflow/api_fastapi/serializers/variables.py b/airflow/api_fastapi/serializers/variables.py new file mode 100644 index 0000000000000..ded268432b89d --- /dev/null +++ b/airflow/api_fastapi/serializers/variables.py @@ -0,0 +1,49 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +import json + +from pydantic import BaseModel, ConfigDict, Field, model_validator +from typing_extensions import Self + +from airflow.utils.log.secrets_masker import redact + + +class VariableResponse(BaseModel): + """Variable serializer for responses.""" + + model_config = ConfigDict(populate_by_name=True) + + key: str + val: str | None = Field(alias="value") + description: str | None + + @model_validator(mode="after") + def redact_val(self) -> Self: + if self.val is None: + return self + try: + val_dict = json.loads(self.val) + redacted_dict = redact(val_dict, max_depth=1) + self.val = json.dumps(redacted_dict) + return self + except json.JSONDecodeError: + # value is not a serialized string representation of a dict. 
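+            # Mask the raw string instead; redact() hides the full value when
+            # the variable key itself looks sensitive (e.g. "password", "secret").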
+ self.val = redact(self.val, self.key) + return self diff --git a/airflow/api_fastapi/views/public/variables.py b/airflow/api_fastapi/views/public/variables.py index e4edb8601fd09..e6cbb136f1cac 100644 --- a/airflow/api_fastapi/views/public/variables.py +++ b/airflow/api_fastapi/views/public/variables.py @@ -17,11 +17,13 @@ from __future__ import annotations from fastapi import Depends, HTTPException +from sqlalchemy import select from sqlalchemy.orm import Session from typing_extensions import Annotated from airflow.api_fastapi.db.common import get_session from airflow.api_fastapi.openapi.exceptions import create_openapi_http_exception_doc +from airflow.api_fastapi.serializers.variables import VariableResponse from airflow.api_fastapi.views.router import AirflowRouter from airflow.models.variable import Variable @@ -40,3 +42,17 @@ async def delete_variable( """Delete a variable entry.""" if Variable.delete(variable_key, session) == 0: raise HTTPException(404, f"The Variable with key: `{variable_key}` was not found") + + +@variables_router.get("/{variable_key}", responses=create_openapi_http_exception_doc([401, 403, 404])) +async def get_variable( + variable_key: str, + session: Annotated[Session, Depends(get_session)], +) -> VariableResponse: + """Get a variable entry.""" + variable = session.scalar(select(Variable).where(Variable.key == variable_key).limit(1)) + + if variable is None: + raise HTTPException(404, f"The Variable with key: `{variable_key}` was not found") + + return VariableResponse.model_validate(variable, from_attributes=True) diff --git a/airflow/ui/openapi-gen/queries/common.ts b/airflow/ui/openapi-gen/queries/common.ts index fbbbac5d60625..72fd0ef9ccde1 100644 --- a/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow/ui/openapi-gen/queries/common.ts @@ -134,6 +134,22 @@ export const UseConnectionServiceGetConnectionKeyFn = ( useConnectionServiceGetConnectionKey, ...(queryKey ?? [{ connectionId }]), ]; +export type VariableServiceGetVariableDefaultResponse = Awaited< + ReturnType +>; +export type VariableServiceGetVariableQueryResult< + TData = VariableServiceGetVariableDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useVariableServiceGetVariableKey = "VariableServiceGetVariable"; +export const UseVariableServiceGetVariableKeyFn = ( + { + variableKey, + }: { + variableKey: string; + }, + queryKey?: Array, +) => [useVariableServiceGetVariableKey, ...(queryKey ?? [{ variableKey }])]; export type DagServicePatchDagsMutationResult = Awaited< ReturnType >; diff --git a/airflow/ui/openapi-gen/queries/prefetch.ts b/airflow/ui/openapi-gen/queries/prefetch.ts index 7c8555b29d264..a114b9dc92c6c 100644 --- a/airflow/ui/openapi-gen/queries/prefetch.ts +++ b/airflow/ui/openapi-gen/queries/prefetch.ts @@ -6,6 +6,7 @@ import { ConnectionService, DagService, DashboardService, + VariableService, } from "../requests/services.gen"; import { DagRunState } from "../requests/types.gen"; import * as Common from "./common"; @@ -165,3 +166,23 @@ export const prefetchUseConnectionServiceGetConnection = ( queryKey: Common.UseConnectionServiceGetConnectionKeyFn({ connectionId }), queryFn: () => ConnectionService.getConnection({ connectionId }), }); +/** + * Get Variable + * Get a variable entry. + * @param data The data for the request. 
+ * @param data.variableKey + * @returns VariableResponse Successful Response + * @throws ApiError + */ +export const prefetchUseVariableServiceGetVariable = ( + queryClient: QueryClient, + { + variableKey, + }: { + variableKey: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseVariableServiceGetVariableKeyFn({ variableKey }), + queryFn: () => VariableService.getVariable({ variableKey }), + }); diff --git a/airflow/ui/openapi-gen/queries/queries.ts b/airflow/ui/openapi-gen/queries/queries.ts index 9137ea4ed01bc..a3ce02257160a 100644 --- a/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow/ui/openapi-gen/queries/queries.ts @@ -208,6 +208,35 @@ export const useConnectionServiceGetConnection = < queryFn: () => ConnectionService.getConnection({ connectionId }) as TData, ...options, }); +/** + * Get Variable + * Get a variable entry. + * @param data The data for the request. + * @param data.variableKey + * @returns VariableResponse Successful Response + * @throws ApiError + */ +export const useVariableServiceGetVariable = < + TData = Common.VariableServiceGetVariableDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + variableKey, + }: { + variableKey: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseVariableServiceGetVariableKeyFn( + { variableKey }, + queryKey, + ), + queryFn: () => VariableService.getVariable({ variableKey }) as TData, + ...options, + }); /** * Patch Dags * Patch multiple DAGs. diff --git a/airflow/ui/openapi-gen/queries/suspense.ts b/airflow/ui/openapi-gen/queries/suspense.ts index 70be4beb0d479..fbef843c6e0ab 100644 --- a/airflow/ui/openapi-gen/queries/suspense.ts +++ b/airflow/ui/openapi-gen/queries/suspense.ts @@ -6,6 +6,7 @@ import { ConnectionService, DagService, DashboardService, + VariableService, } from "../requests/services.gen"; import { DagRunState } from "../requests/types.gen"; import * as Common from "./common"; @@ -202,3 +203,32 @@ export const useConnectionServiceGetConnectionSuspense = < queryFn: () => ConnectionService.getConnection({ connectionId }) as TData, ...options, }); +/** + * Get Variable + * Get a variable entry. + * @param data The data for the request. 
+ * @param data.variableKey + * @returns VariableResponse Successful Response + * @throws ApiError + */ +export const useVariableServiceGetVariableSuspense = < + TData = Common.VariableServiceGetVariableDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + variableKey, + }: { + variableKey: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseVariableServiceGetVariableKeyFn( + { variableKey }, + queryKey, + ), + queryFn: () => VariableService.getVariable({ variableKey }) as TData, + ...options, + }); diff --git a/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow/ui/openapi-gen/requests/schemas.gen.ts index 26fe1180ae7b5..8f76ebd13c40f 100644 --- a/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -1019,3 +1019,38 @@ export const $ValidationError = { required: ["loc", "msg", "type"], title: "ValidationError", } as const; + +export const $VariableResponse = { + properties: { + key: { + type: "string", + title: "Key", + }, + value: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Value", + }, + description: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Description", + }, + }, + type: "object", + required: ["key", "value", "description"], + title: "VariableResponse", + description: "Variable serializer for responses.", +} as const; diff --git a/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow/ui/openapi-gen/requests/services.gen.ts index 268f636404b16..7f61fd32f3493 100644 --- a/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow/ui/openapi-gen/requests/services.gen.ts @@ -21,6 +21,8 @@ import type { GetConnectionResponse, DeleteVariableData, DeleteVariableResponse, + GetVariableData, + GetVariableResponse, } from "./types.gen"; export class AssetService { @@ -305,4 +307,30 @@ export class VariableService { }, }); } + + /** + * Get Variable + * Get a variable entry. + * @param data The data for the request. + * @param data.variableKey + * @returns VariableResponse Successful Response + * @throws ApiError + */ + public static getVariable( + data: GetVariableData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/variables/{variable_key}", + path: { + variable_key: data.variableKey, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } } diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index 268960a596870..7b5fc54065a01 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -206,6 +206,15 @@ export type ValidationError = { type: string; }; +/** + * Variable serializer for responses. 
+ */ +export type VariableResponse = { + key: string; + value: string | null; + description: string | null; +}; + export type NextRunAssetsData = { dagId: string; }; @@ -283,6 +292,12 @@ export type DeleteVariableData = { export type DeleteVariableResponse = void; +export type GetVariableData = { + variableKey: string; +}; + +export type GetVariableResponse = VariableResponse; + export type $OpenApiTs = { "/ui/next_run_assets/{dag_id}": { get: { @@ -504,5 +519,30 @@ export type $OpenApiTs = { 422: HTTPValidationError; }; }; + get: { + req: GetVariableData; + res: { + /** + * Successful Response + */ + 200: VariableResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; }; diff --git a/tests/api_fastapi/views/public/test_variables.py b/tests/api_fastapi/views/public/test_variables.py index 56d65b98b6589..0ee2cebd77340 100644 --- a/tests/api_fastapi/views/public/test_variables.py +++ b/tests/api_fastapi/views/public/test_variables.py @@ -25,15 +25,41 @@ pytestmark = pytest.mark.db_test TEST_VARIABLE_KEY = "test_variable_key" -TEST_VARIABLE_VAL = 3 +TEST_VARIABLE_VALUE = "test_variable_value" TEST_VARIABLE_DESCRIPTION = "Some description for the variable" -TEST_CONN_TYPE = "test_type" + + +TEST_VARIABLE_KEY2 = "password" +TEST_VARIABLE_VALUE2 = "some_password" +TEST_VARIABLE_DESCRIPTION2 = "Some description for the password" + + +TEST_VARIABLE_KEY3 = "dictionary_password" +TEST_VARIABLE_VALUE3 = '{"password": "some_password"}' +TEST_VARIABLE_DESCRIPTION3 = "Some description for the variable" @provide_session def _create_variable(session) -> None: Variable.set( - key=TEST_VARIABLE_KEY, value=TEST_VARIABLE_VAL, description=TEST_VARIABLE_DESCRIPTION, session=session + key=TEST_VARIABLE_KEY, + value=TEST_VARIABLE_VALUE, + description=TEST_VARIABLE_DESCRIPTION, + session=session, + ) + + Variable.set( + key=TEST_VARIABLE_KEY2, + value=TEST_VARIABLE_VALUE2, + description=TEST_VARIABLE_DESCRIPTION2, + session=session, + ) + + Variable.set( + key=TEST_VARIABLE_KEY3, + value=TEST_VARIABLE_VALUE3, + description=TEST_VARIABLE_DESCRIPTION3, + session=session, ) @@ -53,14 +79,58 @@ class TestDeleteVariable(TestVariableEndpoint): def test_delete_should_respond_204(self, test_client, session): self.create_variable() variables = session.query(Variable).all() - assert len(variables) == 1 + assert len(variables) == 3 response = test_client.delete(f"/public/variables/{TEST_VARIABLE_KEY}") assert response.status_code == 204 variables = session.query(Variable).all() - assert len(variables) == 0 + assert len(variables) == 2 def test_delete_should_respond_404(self, test_client): response = test_client.delete(f"/public/variables/{TEST_VARIABLE_KEY}") assert response.status_code == 404 body = response.json() assert f"The Variable with key: `{TEST_VARIABLE_KEY}` was not found" == body["detail"] + + +class TestGetVariable(TestVariableEndpoint): + @pytest.mark.enable_redact + @pytest.mark.parametrize( + "key, expected_response", + [ + ( + TEST_VARIABLE_KEY, + { + "key": TEST_VARIABLE_KEY, + "value": TEST_VARIABLE_VALUE, + "description": TEST_VARIABLE_DESCRIPTION, + }, + ), + ( + TEST_VARIABLE_KEY2, + { + "key": TEST_VARIABLE_KEY2, + "value": "***", + "description": TEST_VARIABLE_DESCRIPTION2, + }, + ), + ( + TEST_VARIABLE_KEY3, + { + "key": TEST_VARIABLE_KEY3, + "value": '{"password": "***"}', + "description": 
TEST_VARIABLE_DESCRIPTION3, + }, + ), + ], + ) + def test_get_should_respond_200(self, test_client, session, key, expected_response): + self.create_variable() + response = test_client.get(f"/public/variables/{key}") + assert response.status_code == 200 + assert response.json() == expected_response + + def test_get_should_respond_404(self, test_client): + response = test_client.get(f"/public/variables/{TEST_VARIABLE_KEY}") + assert response.status_code == 404 + body = response.json() + assert f"The Variable with key: `{TEST_VARIABLE_KEY}` was not found" == body["detail"] From 26d7919387d960aca91601733adbb54c1f050d16 Mon Sep 17 00:00:00 2001 From: Daniel Standish <15932138+dstandish@users.noreply.github.com> Date: Wed, 9 Oct 2024 08:15:37 -0700 Subject: [PATCH 041/125] Move test of DagRun.update_state to better place (#42845) Previously this lived in test_scheduler_job.py It only really tested the behavior of DagRun.update_state. As far as I can tell, it checks that if you null out the state on a TI of a finished dag, and then you call ``update_state``, then the DR will be set to running. --- tests/jobs/test_scheduler_job.py | 35 ------------------------------ tests/models/test_dagrun.py | 37 +++++++++++++++++++++++++++++++- 2 files changed, 36 insertions(+), 36 deletions(-) diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py index d26369f5d728e..bc2be0a12c743 100644 --- a/tests/jobs/test_scheduler_job.py +++ b/tests/jobs/test_scheduler_job.py @@ -2950,41 +2950,6 @@ def test_dagrun_root_fail(self): dagrun_state=State.FAILED, ) - def test_dagrun_root_fail_unfinished(self): - """ - DagRuns with one unfinished and one failed root task -> RUNNING - """ - # TODO: this should live in test_dagrun.py - # Run both the failed and successful tasks - dag_id = "test_dagrun_states_root_fail_unfinished" - dag = self.dagbag.get_dag(dag_id) - data_interval = dag.infer_automated_data_interval(DEFAULT_LOGICAL_DATE) - triggered_by_kwargs = {"triggered_by": DagRunTriggeredByType.TEST} if AIRFLOW_V_3_0_PLUS else {} - dr = dag.create_dagrun( - run_type=DagRunType.SCHEDULED, - execution_date=DEFAULT_DATE, - state=None, - data_interval=data_interval, - **triggered_by_kwargs, - ) - self.null_exec.mock_task_fail(dag_id, "test_dagrun_fail", dr.run_id) - - # todo: AIP-78 remove this test along with DAG.run() - # this only tests the backfill job runner, not the scheduler - with pytest.warns(RemovedInAirflow3Warning): - for _ in _mock_executor(self.null_exec): - with pytest.raises(AirflowException): - dag.run(start_date=dr.execution_date, end_date=dr.execution_date) - - # Mark the successful task as never having run since we want to see if the - # dagrun will be in a running state despite having an unfinished task. 
- with create_session() as session: - ti = dr.get_task_instance("test_dagrun_unfinished", session=session) - ti.state = State.NONE - session.commit() - dr.update_state() - assert dr.state == State.RUNNING - def test_dagrun_root_after_dagrun_unfinished(self, mock_executor): """ DagRuns with one successful and one future root task -> SUCCESS diff --git a/tests/models/test_dagrun.py b/tests/models/test_dagrun.py index 9184f561b3df9..dac2982b0ba34 100644 --- a/tests/models/test_dagrun.py +++ b/tests/models/test_dagrun.py @@ -37,7 +37,7 @@ from airflow.models.taskmap import TaskMap from airflow.models.taskreschedule import TaskReschedule from airflow.operators.empty import EmptyOperator -from airflow.operators.python import ShortCircuitOperator +from airflow.operators.python import PythonOperator, ShortCircuitOperator from airflow.providers.standard.operators.bash import BashOperator from airflow.serialization.serialized_objects import SerializedDAG from airflow.stats import Stats @@ -1050,6 +1050,41 @@ def test_states_sets(self, session): assert ti_success.state in State.success_states assert ti_failed.state in State.failed_states + def test_update_state_one_unfinished(self, dag_maker, session): + """ + Previously this lived in test_scheduler_job.py + + It only really tested the behavior of DagRun.update_state. + + As far as I can tell, it checks that if you null out the state on a TI of a finished dag, + and then you call ``update_state``, then the DR will be set to running. + """ + with dag_maker(session=session) as dag: + PythonOperator(task_id="t1", python_callable=lambda: print) + PythonOperator(task_id="t2", python_callable=lambda: print) + dr = dag.create_dagrun( + state=DagRunState.FAILED, + triggered_by=DagRunTriggeredByType.TEST, + run_id="abc123", + session=session, + ) + for ti in dr.get_task_instances(session=session): + ti.state = TaskInstanceState.FAILED + session.commit() + session.expunge_all() + dr = session.get(DagRun, dr.id) + assert dr.state == DagRunState.FAILED + ti = dr.get_task_instance("t1", session=session) + ti.state = State.NONE + session.commit() + dr = session.get(DagRun, dr.id) + assert dr.state == DagRunState.FAILED + dr.dag = dag + dr.update_state(session=session) + session.commit() + dr = session.get(DagRun, dr.id) + assert dr.state == State.RUNNING + @pytest.mark.parametrize( ("run_type", "expected_tis"), From d395c1f1f8dad18a1daf5275a522f4f62bcf6aec Mon Sep 17 00:00:00 2001 From: Maciej Obuchowski Date: Wed, 9 Oct 2024 17:28:06 +0200 Subject: [PATCH 042/125] Revert "Remove `sqlalchemy-redshift` dependency from Amazon provider (#42830)" (#42864) This reverts commit 3b4c73a644f40013927fb72227dfe018dc47a97f. 
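
Restoring the pin keeps the "redshift+redshift_connector" SQLAlchemy dialect
available: that dialect is registered by sqlalchemy-redshift, and
RedshiftSQLHook builds its SQLAlchemy engine against it. A minimal sketch of
what breaks without the package (connection details are illustrative):

    from sqlalchemy import create_engine

    # Without sqlalchemy-redshift installed, SQLAlchemy cannot resolve the
    # "redshift.redshift_connector" dialect entry point and raises
    # NoSuchModuleError at create_engine() time.
    engine = create_engine("redshift+redshift_connector://user:pw@host:5439/dev")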
--- airflow/providers/amazon/provider.yaml | 1 + docs/apache-airflow-providers-amazon/index.rst | 1 + generated/provider_dependencies.json | 1 + 3 files changed, 3 insertions(+) diff --git a/airflow/providers/amazon/provider.yaml b/airflow/providers/amazon/provider.yaml index 741efb70e7e51..1316cd05231a0 100644 --- a/airflow/providers/amazon/provider.yaml +++ b/airflow/providers/amazon/provider.yaml @@ -105,6 +105,7 @@ dependencies: - watchtower>=3.0.0,!=3.3.0,<4 - jsonpath_ng>=1.5.3 - redshift_connector>=2.0.918 + - sqlalchemy_redshift>=0.8.6 - asgiref>=2.3.0 - PyAthena>=3.0.10 - jmespath>=0.7.0 diff --git a/docs/apache-airflow-providers-amazon/index.rst b/docs/apache-airflow-providers-amazon/index.rst index 88e869bba8016..c58847dcd8876 100644 --- a/docs/apache-airflow-providers-amazon/index.rst +++ b/docs/apache-airflow-providers-amazon/index.rst @@ -119,6 +119,7 @@ PIP package Version required ``watchtower`` ``>=3.0.0,!=3.3.0,<4`` ``jsonpath_ng`` ``>=1.5.3`` ``redshift_connector`` ``>=2.0.918`` +``sqlalchemy_redshift`` ``>=0.8.6`` ``asgiref`` ``>=2.3.0`` ``PyAthena`` ``>=3.0.10`` ``jmespath`` ``>=0.7.0`` diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json index 57bca636ed605..2d0ab90a3508b 100644 --- a/generated/provider_dependencies.json +++ b/generated/provider_dependencies.json @@ -38,6 +38,7 @@ "jsonpath_ng>=1.5.3", "python3-saml>=1.16.0", "redshift_connector>=2.0.918", + "sqlalchemy_redshift>=0.8.6", "watchtower>=3.0.0,!=3.3.0,<4" ], "devel-deps": [ From 2bb862846358d1c5a59b354adb39bc68d5aeae5e Mon Sep 17 00:00:00 2001 From: Elad Kalif <45845474+eladkal@users.noreply.github.com> Date: Wed, 9 Oct 2024 23:10:46 +0700 Subject: [PATCH 043/125] Prepare docs for Oct 1st adhoc wave of providers (#42862) * Prepare docs for Oct 1st adhoc wave of providers * Update * update amazon --- airflow/providers/amazon/CHANGELOG.rst | 58 +++++++++++++------ airflow/providers/amazon/__init__.py | 2 +- airflow/providers/amazon/provider.yaml | 3 +- airflow/providers/apache/beam/CHANGELOG.rst | 12 ++++ airflow/providers/apache/beam/__init__.py | 2 +- airflow/providers/apache/beam/provider.yaml | 3 +- airflow/providers/apache/kafka/CHANGELOG.rst | 12 ++++ airflow/providers/apache/kafka/__init__.py | 2 +- airflow/providers/apache/kafka/provider.yaml | 3 +- airflow/providers/apache/spark/CHANGELOG.rst | 8 +++ airflow/providers/apache/spark/__init__.py | 2 +- airflow/providers/apache/spark/provider.yaml | 3 +- airflow/providers/celery/CHANGELOG.rst | 18 ++++++ airflow/providers/celery/__init__.py | 2 +- airflow/providers/celery/provider.yaml | 3 +- airflow/providers/cloudant/CHANGELOG.rst | 12 ++++ airflow/providers/cloudant/__init__.py | 2 +- airflow/providers/cloudant/provider.yaml | 3 +- .../providers/cncf/kubernetes/CHANGELOG.rst | 26 ++++++++- airflow/providers/cncf/kubernetes/__init__.py | 2 +- airflow/providers/common/compat/CHANGELOG.rst | 12 ++++ airflow/providers/common/compat/__init__.py | 2 +- airflow/providers/common/compat/provider.yaml | 3 +- airflow/providers/common/io/CHANGELOG.rst | 13 +++++ airflow/providers/common/io/__init__.py | 2 +- airflow/providers/common/io/provider.yaml | 3 +- airflow/providers/common/sql/CHANGELOG.rst | 17 ++++++ airflow/providers/common/sql/__init__.py | 2 +- airflow/providers/common/sql/provider.yaml | 3 +- airflow/providers/databricks/CHANGELOG.rst | 18 ++++++ airflow/providers/databricks/__init__.py | 2 +- airflow/providers/databricks/provider.yaml | 3 +- airflow/providers/dbt/cloud/CHANGELOG.rst | 17 ++++++ 
airflow/providers/dbt/cloud/__init__.py | 2 +- airflow/providers/dbt/cloud/provider.yaml | 3 +- airflow/providers/elasticsearch/CHANGELOG.rst | 13 +++++ airflow/providers/elasticsearch/__init__.py | 2 +- airflow/providers/elasticsearch/provider.yaml | 3 +- airflow/providers/fab/CHANGELOG.rst | 14 +++++ airflow/providers/fab/__init__.py | 2 +- airflow/providers/fab/provider.yaml | 3 +- airflow/providers/google/CHANGELOG.rst | 30 ++++++++++ airflow/providers/google/__init__.py | 2 +- airflow/providers/google/provider.yaml | 3 +- airflow/providers/jdbc/CHANGELOG.rst | 12 ++++ airflow/providers/jdbc/__init__.py | 2 +- airflow/providers/jdbc/provider.yaml | 3 +- .../providers/microsoft/azure/CHANGELOG.rst | 18 ++++++ airflow/providers/microsoft/azure/__init__.py | 2 +- .../providers/microsoft/azure/provider.yaml | 3 +- airflow/providers/mysql/CHANGELOG.rst | 12 ++++ airflow/providers/mysql/__init__.py | 2 +- airflow/providers/mysql/provider.yaml | 3 +- airflow/providers/openlineage/CHANGELOG.rst | 14 +++++ airflow/providers/openlineage/__init__.py | 2 +- airflow/providers/openlineage/provider.yaml | 3 +- airflow/providers/opensearch/CHANGELOG.rst | 22 +++++++ airflow/providers/opensearch/__init__.py | 2 +- airflow/providers/opensearch/provider.yaml | 3 +- airflow/providers/postgres/CHANGELOG.rst | 12 ++++ airflow/providers/postgres/__init__.py | 2 +- airflow/providers/postgres/provider.yaml | 3 +- airflow/providers/snowflake/CHANGELOG.rst | 18 ++++++ airflow/providers/snowflake/__init__.py | 2 +- airflow/providers/snowflake/provider.yaml | 3 +- airflow/providers/trino/CHANGELOG.rst | 12 ++++ airflow/providers/trino/__init__.py | 2 +- airflow/providers/trino/provider.yaml | 3 +- airflow/providers/ydb/CHANGELOG.rst | 12 ++++ airflow/providers/ydb/__init__.py | 2 +- airflow/providers/ydb/provider.yaml | 3 +- .../commits.rst | 28 ++++++++- .../apache-airflow-providers-amazon/index.rst | 6 +- .../commits.rst | 14 ++++- .../index.rst | 6 +- .../commits.rst | 14 ++++- .../index.rst | 2 +- .../commits.rst | 14 ++++- .../index.rst | 6 +- .../commits.rst | 22 +++++-- .../apache-airflow-providers-celery/index.rst | 7 ++- .../commits.rst | 14 ++++- .../index.rst | 2 +- .../commits.rst | 22 ++++++- .../index.rst | 2 +- .../commits.rst | 14 ++++- .../index.rst | 30 +++++++++- .../commits.rst | 15 ++++- .../index.rst | 6 +- .../commits.rst | 15 ++++- .../index.rst | 6 +- .../commits.rst | 16 ++++- .../index.rst | 6 +- .../commits.rst | 21 +++++-- .../index.rst | 6 +- .../commits.rst | 15 ++++- .../index.rst | 6 +- docs/apache-airflow-providers-fab/commits.rst | 14 +++++ docs/apache-airflow-providers-fab/index.rst | 51 ++++++++++++---- .../commits.rst | 24 +++++++- .../apache-airflow-providers-google/index.rst | 6 +- .../apache-airflow-providers-jdbc/commits.rst | 14 ++++- docs/apache-airflow-providers-jdbc/index.rst | 6 +- .../commits.rst | 16 ++++- .../index.rst | 14 ++--- .../commits.rst | 14 ++++- docs/apache-airflow-providers-mysql/index.rst | 6 +- .../commits.rst | 16 ++++- .../index.rst | 19 +++--- .../commits.rst | 16 ++++- .../index.rst | 2 +- .../commits.rst | 14 ++++- .../index.rst | 6 +- .../commits.rst | 16 ++++- .../index.rst | 7 ++- .../commits.rst | 14 ++++- docs/apache-airflow-providers-trino/index.rst | 6 +- docs/apache-airflow-providers-ydb/commits.rst | 14 ++++- docs/apache-airflow-providers-ydb/index.rst | 6 +- 119 files changed, 970 insertions(+), 175 deletions(-) diff --git a/airflow/providers/amazon/CHANGELOG.rst b/airflow/providers/amazon/CHANGELOG.rst index 
f4837c54fc366..8099f3943aac9 100644 --- a/airflow/providers/amazon/CHANGELOG.rst +++ b/airflow/providers/amazon/CHANGELOG.rst @@ -26,12 +26,30 @@ Changelog --------- -Main -...... +9.0.0 +..... Breaking changes ~~~~~~~~~~~~~~~~ +.. warning:: + In order to support session reuse in RedshiftData operators, the following breaking changes were introduced: + + The ``database`` argument is now optional and as a result was moved after the ``sql`` argument which is a positional + one. Update your DAGs accordingly if they rely on argument order. Applies to: + + * ``RedshiftDataHook``'s ``execute_query`` method + * ``RedshiftDataOperator`` + + ``RedshiftDataHook``'s ``execute_query`` method now returns a ``QueryExecutionOutput`` object instead of just the + statement ID as a string. + + ``RedshiftDataHook``'s ``parse_statement_resposne`` method was renamed to ``parse_statement_response``. + + ``S3ToRedshiftOperator``'s ``schema`` argument is now optional and was moved after the ``s3_key`` positional argument. + Update your DAGs accordingly if they rely on argument order. + + .. warning:: All deprecated classes, parameters and features have been removed from the Amazon provider package. The following breaking changes were introduced: @@ -123,27 +141,33 @@ Breaking changes * Removed ``host`` from AWS connection, please set it in ``extra['endpoint_url']`` instead * Removed ``region`` parameter from ``AwsHookParams``. Use ``region_name`` instead -.. warning:: - In order to support session reuse in RedshiftData operators, the following breaking changes were introduced: - - The ``database`` argument is now optional and as a result was moved after the ``sql`` argument which is a positional - one. Update your DAGs accordingly if they rely on argument order. Applies to: +* ``Remove deprecated stuff from Amazon provider package (#42450)`` +* ``Support session reuse in 'RedshiftDataOperator' (#42218)`` - * ``RedshiftDataHook``'s ``execute_query`` method - * ``RedshiftDataOperator`` +Features +~~~~~~~~ - ``RedshiftDataHook``'s ``execute_query`` method now returns a ``QueryExecutionOutput`` object instead of just the - statement ID as a string. +* ``Add STOPPED to the failure cases for Sagemaker Training Jobs (#42423)`` - ``RedshiftDataHook``'s ``parse_statement_resposne`` method was renamed to ``parse_statement_response``. +Bug Fixes +~~~~~~~~~ - ``S3ToRedshiftOperator``'s ``schema`` argument is now optional and was moved after the ``s3_key`` positional argument. - Update your DAGs accordingly if they rely on argument order. 
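+
+    For example, a call that previously relied on positional argument order
+    may now need explicit keywords (a minimal sketch; the values shown are
+    illustrative)::
+
+        RedshiftDataOperator(
+            task_id="run_query",
+            sql="SELECT 1;",
+            database="dev",
+        )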
+* ``'S3DeleteObjects' Operator: Handle dates passed as strings (#42464)`` +* ``Small fix to AWS AVP cli init script (#42479)`` +* ``Make the AWS logging faster by reducing the amount of sleep (#42449)`` +* ``Fix logout in AWS auth manager (#42447)`` +* ``fix(providers/amazon): handle ClientError raised after key is missing during table.get_item (#42408)`` -Features -~~~~~~~~ +Misc +~~~~ -* ``Support session reuse in RedshiftDataOperator, RedshiftToS3Operator and S3ToRedshiftOperator (#42218)`` +* ``Drop python3.8 support core and providers (#42766)`` +* ``Removed conditional check for task context logging in airflow version 2.8.0 and above (#42764)`` +* ``Rename dataset related python variable names to asset (#41348)`` +* ``Remove identity center auth manager cli (#42481)`` +* ``Refactor AWS Auth manager user output (#42454)`` +* ``Remove 'sqlalchemy-redshift' dependency from Amazon provider (#42830)`` +* ``Revert "Remove 'sqlalchemy-redshift' dependency from Amazon provider" (#42864)`` 8.29.0 ...... diff --git a/airflow/providers/amazon/__init__.py b/airflow/providers/amazon/__init__.py index 0ec3cb606e99e..2a7f8d044e6c5 100644 --- a/airflow/providers/amazon/__init__.py +++ b/airflow/providers/amazon/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "8.29.0" +__version__ = "9.0.0" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/amazon/provider.yaml b/airflow/providers/amazon/provider.yaml index 1316cd05231a0..71d097f298198 100644 --- a/airflow/providers/amazon/provider.yaml +++ b/airflow/providers/amazon/provider.yaml @@ -22,9 +22,10 @@ description: | Amazon integration (including `Amazon Web Services (AWS) `__). state: ready -source-date-epoch: 1726859877 +source-date-epoch: 1728484310 # note that those versions are maintained by release manager - do not update them manually versions: + - 9.0.0 - 8.29.0 - 8.28.0 - 8.27.0 diff --git a/airflow/providers/apache/beam/CHANGELOG.rst b/airflow/providers/apache/beam/CHANGELOG.rst index 85f0ac8f72684..019858bae3cf1 100644 --- a/airflow/providers/apache/beam/CHANGELOG.rst +++ b/airflow/providers/apache/beam/CHANGELOG.rst @@ -26,6 +26,18 @@ Changelog --------- +5.8.1 +..... + +Bug Fixes +~~~~~~~~~ + +* ``Bugfix/dataflow job location passing (#41887)`` + + +.. Below changes are excluded from the changelog. Move them to + appropriate section above if needed. Do not delete the lines(!): + 5.8.0 ..... diff --git a/airflow/providers/apache/beam/__init__.py b/airflow/providers/apache/beam/__init__.py index f88dd1a9909c9..5b844184bd12b 100644 --- a/airflow/providers/apache/beam/__init__.py +++ b/airflow/providers/apache/beam/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "5.8.0" +__version__ = "5.8.1" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/apache/beam/provider.yaml b/airflow/providers/apache/beam/provider.yaml index 8011f564d7194..ae0c4d37005b8 100644 --- a/airflow/providers/apache/beam/provider.yaml +++ b/airflow/providers/apache/beam/provider.yaml @@ -22,9 +22,10 @@ description: | `Apache Beam `__. 
state: ready -source-date-epoch: 1723969156 +source-date-epoch: 1728484489 # note that those versions are maintained by release manager - do not update them manually versions: + - 5.8.1 - 5.8.0 - 5.7.2 - 5.7.1 diff --git a/airflow/providers/apache/kafka/CHANGELOG.rst b/airflow/providers/apache/kafka/CHANGELOG.rst index 045475e755b52..38e11241ca3a8 100644 --- a/airflow/providers/apache/kafka/CHANGELOG.rst +++ b/airflow/providers/apache/kafka/CHANGELOG.rst @@ -27,6 +27,18 @@ Changelog --------- +1.6.1 +..... + +Bug Fixes +~~~~~~~~~ + +* ``remove callable functions parameter from kafka operator template_fields (#42555)`` + + +.. Below changes are excluded from the changelog. Move them to + appropriate section above if needed. Do not delete the lines(!): + 1.6.0 ..... diff --git a/airflow/providers/apache/kafka/__init__.py b/airflow/providers/apache/kafka/__init__.py index 862c4c19b49a9..b7050a89dab8e 100644 --- a/airflow/providers/apache/kafka/__init__.py +++ b/airflow/providers/apache/kafka/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "1.6.0" +__version__ = "1.6.1" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/apache/kafka/provider.yaml b/airflow/providers/apache/kafka/provider.yaml index 8a577ba4cdad0..23961ec3c89f5 100644 --- a/airflow/providers/apache/kafka/provider.yaml +++ b/airflow/providers/apache/kafka/provider.yaml @@ -20,11 +20,12 @@ package-name: apache-airflow-providers-apache-kafka name: Apache Kafka state: ready -source-date-epoch: 1723969246 +source-date-epoch: 1728484555 description: | `Apache Kafka `__ # note that those versions are maintained by release manager - do not update them manually versions: + - 1.6.1 - 1.6.0 - 1.5.0 - 1.4.1 diff --git a/airflow/providers/apache/spark/CHANGELOG.rst b/airflow/providers/apache/spark/CHANGELOG.rst index b8773628e3634..df28a68ef7b5a 100644 --- a/airflow/providers/apache/spark/CHANGELOG.rst +++ b/airflow/providers/apache/spark/CHANGELOG.rst @@ -29,6 +29,14 @@ Changelog --------- +4.11.1 +...... + +Misc +~~~~ + +* ``Refactor function resolve_kerberos_principal (#42777)`` + 4.11.0 ...... diff --git a/airflow/providers/apache/spark/__init__.py b/airflow/providers/apache/spark/__init__.py index a652688b00546..a5873c7b144aa 100644 --- a/airflow/providers/apache/spark/__init__.py +++ b/airflow/providers/apache/spark/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "4.11.0" +__version__ = "4.11.1" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/apache/spark/provider.yaml b/airflow/providers/apache/spark/provider.yaml index 77ee6d109970c..8d9c2c704ae5d 100644 --- a/airflow/providers/apache/spark/provider.yaml +++ b/airflow/providers/apache/spark/provider.yaml @@ -22,9 +22,10 @@ description: | `Apache Spark `__ state: ready -source-date-epoch: 1726860120 +source-date-epoch: 1728484593 # note that those versions are maintained by release manager - do not update them manually versions: + - 4.11.1 - 4.11.0 - 4.10.0 - 4.9.0 diff --git a/airflow/providers/celery/CHANGELOG.rst b/airflow/providers/celery/CHANGELOG.rst index 435edfd6600b8..4043aefb1d13a 100644 --- a/airflow/providers/celery/CHANGELOG.rst +++ b/airflow/providers/celery/CHANGELOG.rst @@ -27,6 +27,24 @@ Changelog --------- +3.8.3 +..... 
+ +Bug Fixes +~~~~~~~~~ + +* ``All executors should inherit from BaseExecutor (#41904)`` +* ``Remove state sync during celery task processing (#41870)`` + +Misc +~~~~ + +* ``Change imports to use Standard provider for BashOperator (#42252)`` + + +.. Below changes are excluded from the changelog. Move them to + appropriate section above if needed. Do not delete the lines(!): + 3.8.2 ..... diff --git a/airflow/providers/celery/__init__.py b/airflow/providers/celery/__init__.py index 826a046783995..07ca45921e83c 100644 --- a/airflow/providers/celery/__init__.py +++ b/airflow/providers/celery/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "3.8.2" +__version__ = "3.8.3" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/celery/provider.yaml b/airflow/providers/celery/provider.yaml index e0cf09a515c6b..156a810c1fee4 100644 --- a/airflow/providers/celery/provider.yaml +++ b/airflow/providers/celery/provider.yaml @@ -22,9 +22,10 @@ description: | `Celery `__ state: ready -source-date-epoch: 1726860145 +source-date-epoch: 1728484718 # note that those versions are maintained by release manager - do not update them manually versions: + - 3.8.3 - 3.8.2 - 3.8.1 - 3.8.0 diff --git a/airflow/providers/cloudant/CHANGELOG.rst b/airflow/providers/cloudant/CHANGELOG.rst index a32cd2b42f2ec..e4446503a8cf6 100644 --- a/airflow/providers/cloudant/CHANGELOG.rst +++ b/airflow/providers/cloudant/CHANGELOG.rst @@ -27,6 +27,18 @@ Changelog --------- +4.0.1 +..... + +Misc +~~~~ + +* ``Drop python3.8 support core and providers (#42766)`` + + +.. Below changes are excluded from the changelog. Move them to + appropriate section above if needed. Do not delete the lines(!): + 4.0.0 ..... diff --git a/airflow/providers/cloudant/__init__.py b/airflow/providers/cloudant/__init__.py index 05483dfd46169..db8335bb72c79 100644 --- a/airflow/providers/cloudant/__init__.py +++ b/airflow/providers/cloudant/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "4.0.0" +__version__ = "4.0.1" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/cloudant/provider.yaml b/airflow/providers/cloudant/provider.yaml index a2de5b4335468..57afe105209c5 100644 --- a/airflow/providers/cloudant/provider.yaml +++ b/airflow/providers/cloudant/provider.yaml @@ -22,9 +22,10 @@ description: | `IBM Cloudant `__ state: ready -source-date-epoch: 1723969866 +source-date-epoch: 1728484755 # note that those versions are maintained by release manager - do not update them manually versions: + - 4.0.1 - 4.0.0 - 3.6.0 - 3.5.2 diff --git a/airflow/providers/cncf/kubernetes/CHANGELOG.rst b/airflow/providers/cncf/kubernetes/CHANGELOG.rst index 3a8101943eaf3..98c179995fb3f 100644 --- a/airflow/providers/cncf/kubernetes/CHANGELOG.rst +++ b/airflow/providers/cncf/kubernetes/CHANGELOG.rst @@ -27,11 +27,33 @@ Changelog --------- -main +9.0.0 ..... +Breaking changes +~~~~~~~~~~~~~~~~ + .. warning:: - Support for identifying pods by execution_date during the upgrade from Airflow 1 to 2 has been removed. This may result in duplicate pods being launched for tasks originally started by Airflow 1, but only one of the task pods will succeed. + Support for identifying pods by execution_date during the upgrade from Airflow 1 to 2 has been removed. 
+ This may result in duplicate pods being launched for tasks originally started by Airflow 1, but only one of the task pods will succeed. + +* ``kubernetes executor cleanup_stuck_queued_tasks optimization (#41220)`` + +Bug Fixes +~~~~~~~~~ + +* ``All executors should inherit from BaseExecutor (#41904)`` +* ``Fix mark as success when pod fails while fetching log (#42815)`` +* ``Fix SparkKubernetesOperator spark name. (#42427)`` +* ``KubernetesPodOperator never stops if credentials are refreshed (#42361)`` +* ``Restructured 'await_xcom_sidecar_container_start' method. (#42504)`` +* ``KubernetesHook kube_config extra can take dict (#41413)`` + +Misc +~~~~ + +* ``Drop python3.8 support core and providers (#42766)`` +* ``Remove airflow_version from k8s executor pod selector (#42751)`` 8.4.2 ..... diff --git a/airflow/providers/cncf/kubernetes/__init__.py b/airflow/providers/cncf/kubernetes/__init__.py index 5f4798d1d98dc..34c63b7fcf87f 100644 --- a/airflow/providers/cncf/kubernetes/__init__.py +++ b/airflow/providers/cncf/kubernetes/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "8.4.2" +__version__ = "9.0.0" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/common/compat/CHANGELOG.rst b/airflow/providers/common/compat/CHANGELOG.rst index 27770f785cc7b..49dfa7b41149b 100644 --- a/airflow/providers/common/compat/CHANGELOG.rst +++ b/airflow/providers/common/compat/CHANGELOG.rst @@ -25,6 +25,18 @@ Changelog --------- +1.2.1 +..... + +Misc +~~~~ + +* ``Rename dataset related python variable names to asset (#41348)`` + + +.. Below changes are excluded from the changelog. Move them to + appropriate section above if needed. Do not delete the lines(!): + 1.2.0 ..... diff --git a/airflow/providers/common/compat/__init__.py b/airflow/providers/common/compat/__init__.py index ffe1acbcce74f..ef51cb422e513 100644 --- a/airflow/providers/common/compat/__init__.py +++ b/airflow/providers/common/compat/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "1.2.0" +__version__ = "1.2.1" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/common/compat/provider.yaml b/airflow/providers/common/compat/provider.yaml index 7ac3a02900924..3618ecfe6435d 100644 --- a/airflow/providers/common/compat/provider.yaml +++ b/airflow/providers/common/compat/provider.yaml @@ -22,9 +22,10 @@ description: | ``Common Compatibility Provider - providing compatibility code for previous Airflow versions.`` state: ready -source-date-epoch: 1723970013 +source-date-epoch: 1728484960 # note that those versions are maintained by release manager - do not update them manually versions: + - 1.2.1 - 1.2.0 - 1.1.0 - 1.0.0 diff --git a/airflow/providers/common/io/CHANGELOG.rst b/airflow/providers/common/io/CHANGELOG.rst index 6b787fe9e4b30..a80b855efd268 100644 --- a/airflow/providers/common/io/CHANGELOG.rst +++ b/airflow/providers/common/io/CHANGELOG.rst @@ -25,6 +25,19 @@ Changelog --------- +1.4.2 +..... + +Misc +~~~~ + +* ``Drop python3.8 support core and providers (#42766)`` +* ``Rename dataset related python variable names to asset (#41348)`` + + +.. Below changes are excluded from the changelog. Move them to + appropriate section above if needed. Do not delete the lines(!): + 1.4.1 ..... 
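Among the ``cncf.kubernetes`` 9.0.0 entries above, ``KubernetesHook kube_config extra can take dict (#41413)`` is worth a concrete illustration. A hedged sketch, assuming the extra key is spelled ``kube_config`` as in the entry (check the provider docs for your version):

.. code-block:: python

    import json

    from airflow.models.connection import Connection

    # Per the changelog wording, the kube_config value inside the connection
    # extra can now be a dict directly, rather than a string-encoded config.
    conn = Connection(
        conn_id="k8s_dict_config",
        conn_type="kubernetes",
        extra=json.dumps(
            {
                "kube_config": {
                    "apiVersion": "v1",
                    "kind": "Config",
                    "clusters": [],
                    "contexts": [],
                    "users": [],
                }
            }
        ),
    )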
diff --git a/airflow/providers/common/io/__init__.py b/airflow/providers/common/io/__init__.py index ac5d2626cfc49..ac05e1f448dc7 100644 --- a/airflow/providers/common/io/__init__.py +++ b/airflow/providers/common/io/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "1.4.1" +__version__ = "1.4.2" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/common/io/provider.yaml b/airflow/providers/common/io/provider.yaml index 6743cfff86c40..c32f046eb929a 100644 --- a/airflow/providers/common/io/provider.yaml +++ b/airflow/providers/common/io/provider.yaml @@ -22,9 +22,10 @@ description: | ``Common IO Provider`` state: ready -source-date-epoch: 1726860435 +source-date-epoch: 1728484966 # note that those versions are maintained by release manager - do not update them manually versions: + - 1.4.2 - 1.4.1 - 1.4.0 - 1.3.2 diff --git a/airflow/providers/common/sql/CHANGELOG.rst b/airflow/providers/common/sql/CHANGELOG.rst index 531353f8c800f..50338885101b4 100644 --- a/airflow/providers/common/sql/CHANGELOG.rst +++ b/airflow/providers/common/sql/CHANGELOG.rst @@ -25,6 +25,23 @@ Changelog --------- +1.18.0 +...... + +Features +~~~~~~~~ + +* ``feat(providers/common/sql): add warning to connection setter (#42736)`` + +Bug Fixes +~~~~~~~~~ + +* ``FIX: Only pass connection to sqlalchemy engine in JdbcHook (#42705)`` + + +.. Below changes are excluded from the changelog. Move them to + appropriate section above if needed. Do not delete the lines(!): + 1.17.1 ...... diff --git a/airflow/providers/common/sql/__init__.py b/airflow/providers/common/sql/__init__.py index 6ef37aa0ed669..53b2468c53268 100644 --- a/airflow/providers/common/sql/__init__.py +++ b/airflow/providers/common/sql/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "1.17.1" +__version__ = "1.18.0" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/common/sql/provider.yaml b/airflow/providers/common/sql/provider.yaml index ec487aca3f001..3b520e9454a4f 100644 --- a/airflow/providers/common/sql/provider.yaml +++ b/airflow/providers/common/sql/provider.yaml @@ -22,9 +22,10 @@ description: | `Common SQL Provider `__ state: ready -source-date-epoch: 1727372263 +source-date-epoch: 1728485007 # note that those versions are maintained by release manager - do not update them manually versions: + - 1.18.0 - 1.17.1 - 1.17.0 - 1.16.0 diff --git a/airflow/providers/databricks/CHANGELOG.rst b/airflow/providers/databricks/CHANGELOG.rst index 487d7dc50f4ff..28fec73032341 100644 --- a/airflow/providers/databricks/CHANGELOG.rst +++ b/airflow/providers/databricks/CHANGELOG.rst @@ -26,6 +26,24 @@ Changelog --------- +6.11.0 +...... + +Features +~~~~~~~~ + +* ``Add 'on_kill' to Databricks Workflow Operator (#42115)`` + +Misc +~~~~ + +* ``add warning log when task_key>100 (#42813)`` +* ``Add debug logs to print Request/Response data in Databricks provider (#42662)`` + + +.. Below changes are excluded from the changelog. Move them to + appropriate section above if needed. Do not delete the lines(!): + 6.10.0 ...... 
diff --git a/airflow/providers/databricks/__init__.py b/airflow/providers/databricks/__init__.py index 7124228f796a1..d81dd8d4c5a22 100644 --- a/airflow/providers/databricks/__init__.py +++ b/airflow/providers/databricks/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "6.10.0" +__version__ = "6.11.0" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/databricks/provider.yaml b/airflow/providers/databricks/provider.yaml index db60aa187ea00..35d9a6aabcead 100644 --- a/airflow/providers/databricks/provider.yaml +++ b/airflow/providers/databricks/provider.yaml @@ -22,9 +22,10 @@ description: | `Databricks `__ state: ready -source-date-epoch: 1726860611 +source-date-epoch: 1728485027 # note that those versions are maintained by release manager - do not update them manually versions: + - 6.11.0 - 6.10.0 - 6.9.0 - 6.8.0 diff --git a/airflow/providers/dbt/cloud/CHANGELOG.rst b/airflow/providers/dbt/cloud/CHANGELOG.rst index db0e93fb8fd40..7eca133d4957d 100644 --- a/airflow/providers/dbt/cloud/CHANGELOG.rst +++ b/airflow/providers/dbt/cloud/CHANGELOG.rst @@ -28,6 +28,23 @@ Changelog --------- +3.11.0 +...... + +Features +~~~~~~~~ + +* ``Add ability to provide proxy for dbt Cloud connection (#42737)`` + +Misc +~~~~ + +* ``Simplify code for recent dbt provider change (#42840)`` + + +.. Below changes are excluded from the changelog. Move them to + appropriate section above if needed. Do not delete the lines(!): + 3.10.1 ...... diff --git a/airflow/providers/dbt/cloud/__init__.py b/airflow/providers/dbt/cloud/__init__.py index f532436b649ce..a72beeb7aedc6 100644 --- a/airflow/providers/dbt/cloud/__init__.py +++ b/airflow/providers/dbt/cloud/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "3.10.1" +__version__ = "3.11.0" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/dbt/cloud/provider.yaml b/airflow/providers/dbt/cloud/provider.yaml index 2c80397a1506d..828aecf335435 100644 --- a/airflow/providers/dbt/cloud/provider.yaml +++ b/airflow/providers/dbt/cloud/provider.yaml @@ -22,9 +22,10 @@ description: | `dbt Cloud `__ state: ready -source-date-epoch: 1726860654 +source-date-epoch: 1728485039 # note that those versions are maintained by release manager - do not update them manually versions: + - 3.11.0 - 3.10.1 - 3.10.0 - 3.9.0 diff --git a/airflow/providers/elasticsearch/CHANGELOG.rst b/airflow/providers/elasticsearch/CHANGELOG.rst index 2e51c8572b227..46dafdd4cd0bb 100644 --- a/airflow/providers/elasticsearch/CHANGELOG.rst +++ b/airflow/providers/elasticsearch/CHANGELOG.rst @@ -27,6 +27,19 @@ Changelog --------- +5.5.2 +..... + +Misc +~~~~ + +* ``Removed conditional check for task context logging in airflow version 2.8.0 and above (#42764)`` + + +.. Below changes are excluded from the changelog. Move them to + appropriate section above if needed. Do not delete the lines(!): + * ``Fix spelling; 'Airlfow' -> 'Airflow' (#42855)`` + 5.5.1 ..... 
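The dbt Cloud ``3.11.0`` feature above (``Add ability to provide proxy for dbt Cloud connection (#42737)``) is connection-level configuration. A sketch under explicit assumptions: the ``proxies`` key name and requests-style mapping below are hypothetical illustrations, not taken from this patch — verify the actual field against the provider documentation:

.. code-block:: python

    import json

    from airflow.models.connection import Connection

    # Hypothetical extra layout using a requests-style proxies mapping; the
    # real field name accepted by the dbt Cloud hook in 3.11.0 may differ.
    conn = Connection(
        conn_id="dbt_cloud_behind_proxy",
        conn_type="dbt_cloud",
        password="<api token>",
        extra=json.dumps({"proxies": {"https": "http://proxy.internal:3128"}}),
    )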
diff --git a/airflow/providers/elasticsearch/__init__.py b/airflow/providers/elasticsearch/__init__.py index 96eab6484f84a..3d480fc501db7 100644 --- a/airflow/providers/elasticsearch/__init__.py +++ b/airflow/providers/elasticsearch/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "5.5.1" +__version__ = "5.5.2" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/elasticsearch/provider.yaml b/airflow/providers/elasticsearch/provider.yaml index 3c90c6763d154..595a3d45be5cd 100644 --- a/airflow/providers/elasticsearch/provider.yaml +++ b/airflow/providers/elasticsearch/provider.yaml @@ -22,9 +22,10 @@ description: | `Elasticsearch `__ state: ready -source-date-epoch: 1726860725 +source-date-epoch: 1728485068 # note that those versions are maintained by release manager - do not update them manually versions: + - 5.5.2 - 5.5.1 - 5.5.0 - 5.4.2 diff --git a/airflow/providers/fab/CHANGELOG.rst b/airflow/providers/fab/CHANGELOG.rst index 1f1d09fea932d..4419184cc3c8b 100644 --- a/airflow/providers/fab/CHANGELOG.rst +++ b/airflow/providers/fab/CHANGELOG.rst @@ -20,6 +20,20 @@ Changelog --------- +1.4.1 +..... + +Misc +~~~~ + +* ``Update Rest API tests to no longer rely on FAB auth manager. Move tests specific to FAB permissions to FAB provider (#42523)`` +* ``Rename dataset related python variable names to asset (#41348)`` +* ``Simplify expression for get_permitted_dag_ids query (#42484)`` + + +.. Below changes are excluded from the changelog. Move them to + appropriate section above if needed. Do not delete the lines(!): + 1.4.0 ..... diff --git a/airflow/providers/fab/__init__.py b/airflow/providers/fab/__init__.py index 263ca95e58a5c..7403b3712d2ff 100644 --- a/airflow/providers/fab/__init__.py +++ b/airflow/providers/fab/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "1.4.0" +__version__ = "1.4.1" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.9.0" diff --git a/airflow/providers/fab/provider.yaml b/airflow/providers/fab/provider.yaml index 1d5cc820f1f24..c5e6f76cccf26 100644 --- a/airflow/providers/fab/provider.yaml +++ b/airflow/providers/fab/provider.yaml @@ -28,10 +28,11 @@ description: | # For providers until we think it should be released. state: ready -source-date-epoch: 1726860772 +source-date-epoch: 1728485088 # note that those versions are maintained by release manager - do not update them manually versions: + - 1.4.1 - 1.4.0 - 1.3.0 - 1.2.2 diff --git a/airflow/providers/google/CHANGELOG.rst b/airflow/providers/google/CHANGELOG.rst index 7d0ae7e7985ca..e4d9a15099341 100644 --- a/airflow/providers/google/CHANGELOG.rst +++ b/airflow/providers/google/CHANGELOG.rst @@ -27,6 +27,36 @@ Changelog --------- +10.24.0 +....... + +Features +~~~~~~~~ + +* ``Add 'retry_if_resource_not_ready' logic for DataprocCreateClusterOperator and DataprocCreateBatchOperator (#42703)`` + +Bug Fixes +~~~~~~~~~ + +* ``Publish Dataproc Serverless Batch link after it starts if batch_id was provided (#41153)`` +* ``fix PubSubAsyncHook in PubsubPullTrigger to use gcp_conn_id (#42671)`` +* ``Fix consistent return response from PubSubPullSensor (#42080)`` +* ``Undo partition exclusion from the table name when splitting a full BigQuery table name (#42541)`` +* ``Fix gcp text to speech uri fetch (#42309)`` +* ``Refactor ''bucket.get_blob'' calls in ''GCSHook'' to handle validation for non-existent objects. 
(#42474)`` +* ``Bugfix/dataflow job location passing (#41887)`` + +Misc +~~~~ + +* ``Removed conditional check for task context logging in airflow version 2.8.0 and above (#42764)`` +* ``Rename dataset related python variable names to asset (#41348)`` +* ``Deprecate AutoMLBatchPredictOperator and refactor AutoMl system tests (#42260)`` + + +.. Below changes are excluded from the changelog. Move them to + appropriate section above if needed. Do not delete the lines(!): + 10.23.0 ....... diff --git a/airflow/providers/google/__init__.py b/airflow/providers/google/__init__.py index ffbf74f2df63f..e5caa3ef42b10 100644 --- a/airflow/providers/google/__init__.py +++ b/airflow/providers/google/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "10.23.0" +__version__ = "10.24.0" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/google/provider.yaml b/airflow/providers/google/provider.yaml index a64b2ce17a76e..9e469d278775b 100644 --- a/airflow/providers/google/provider.yaml +++ b/airflow/providers/google/provider.yaml @@ -29,9 +29,10 @@ description: | - `Google Workspace `__ (formerly Google Suite) state: ready -source-date-epoch: 1726860901 +source-date-epoch: 1728485162 # note that those versions are maintained by release manager - do not update them manually versions: + - 10.24.0 - 10.23.0 - 10.22.0 - 10.21.1 diff --git a/airflow/providers/jdbc/CHANGELOG.rst b/airflow/providers/jdbc/CHANGELOG.rst index 872ddeea5963f..22894e7b73fb3 100644 --- a/airflow/providers/jdbc/CHANGELOG.rst +++ b/airflow/providers/jdbc/CHANGELOG.rst @@ -26,6 +26,18 @@ Changelog --------- +4.5.2 +..... + +Bug Fixes +~~~~~~~~~ + +* ``FIX: Only pass connection to sqlalchemy engine in JdbcHook (#42705)`` + + +.. Below changes are excluded from the changelog. Move them to + appropriate section above if needed. Do not delete the lines(!): + 4.5.1 ..... diff --git a/airflow/providers/jdbc/__init__.py b/airflow/providers/jdbc/__init__.py index 2bda2a955fea3..dde5dab0cc937 100644 --- a/airflow/providers/jdbc/__init__.py +++ b/airflow/providers/jdbc/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "4.5.1" +__version__ = "4.5.2" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/jdbc/provider.yaml b/airflow/providers/jdbc/provider.yaml index c247b079f7731..5165d33939617 100644 --- a/airflow/providers/jdbc/provider.yaml +++ b/airflow/providers/jdbc/provider.yaml @@ -22,9 +22,10 @@ description: | `Java Database Connectivity (JDBC) `__ state: ready -source-date-epoch: 1726860958 +source-date-epoch: 1728485176 # note that those versions are maintained by release manager - do not update them manually versions: + - 4.5.2 - 4.5.1 - 4.5.0 - 4.4.0 diff --git a/airflow/providers/microsoft/azure/CHANGELOG.rst b/airflow/providers/microsoft/azure/CHANGELOG.rst index be33a4e95d3fa..148fce2e1152f 100644 --- a/airflow/providers/microsoft/azure/CHANGELOG.rst +++ b/airflow/providers/microsoft/azure/CHANGELOG.rst @@ -27,6 +27,24 @@ Changelog --------- +10.5.1 +...... + +Bug Fixes +~~~~~~~~~ + +* ``(bugfix): Paginated results in MSGraphAsyncOperator (#42414)`` + +Misc +~~~~ + +* ``Workaround pin azure kusto data (#42576)`` +* ``Removed conditional check for task context logging in airflow version 2.8.0 and above (#42764)`` + + +.. Below changes are excluded from the changelog. 
Move them to + appropriate section above if needed. Do not delete the lines(!): + 10.5.0 ...... diff --git a/airflow/providers/microsoft/azure/__init__.py b/airflow/providers/microsoft/azure/__init__.py index bd769cef5e33d..44aafcaa1730a 100644 --- a/airflow/providers/microsoft/azure/__init__.py +++ b/airflow/providers/microsoft/azure/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "10.5.0" +__version__ = "10.5.1" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/microsoft/azure/provider.yaml b/airflow/providers/microsoft/azure/provider.yaml index 45fe28eecffc7..a47ec86de9f78 100644 --- a/airflow/providers/microsoft/azure/provider.yaml +++ b/airflow/providers/microsoft/azure/provider.yaml @@ -20,9 +20,10 @@ name: Microsoft Azure description: | `Microsoft Azure `__ state: ready -source-date-epoch: 1726861002 +source-date-epoch: 1728485226 # note that those versions are maintained by release manager - do not update them manually versions: + - 10.5.1 - 10.5.0 - 10.4.0 - 10.3.0 diff --git a/airflow/providers/mysql/CHANGELOG.rst b/airflow/providers/mysql/CHANGELOG.rst index 7acdd410135b2..3426e0cd10883 100644 --- a/airflow/providers/mysql/CHANGELOG.rst +++ b/airflow/providers/mysql/CHANGELOG.rst @@ -26,6 +26,18 @@ Changelog --------- +5.7.2 +..... + +Misc +~~~~ + +* ``Rename dataset related python variable names to asset (#41348)`` + + +.. Below changes are excluded from the changelog. Move them to + appropriate section above if needed. Do not delete the lines(!): + 5.7.1 ..... diff --git a/airflow/providers/mysql/__init__.py b/airflow/providers/mysql/__init__.py index 586de40e0c0df..4c897ee113178 100644 --- a/airflow/providers/mysql/__init__.py +++ b/airflow/providers/mysql/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "5.7.1" +__version__ = "5.7.2" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/mysql/provider.yaml b/airflow/providers/mysql/provider.yaml index f0f77f28d0e94..82e5b8cda2c3b 100644 --- a/airflow/providers/mysql/provider.yaml +++ b/airflow/providers/mysql/provider.yaml @@ -22,9 +22,10 @@ description: | `MySQL `__ state: ready -source-date-epoch: 1726861041 +source-date-epoch: 1728485234 # note that those versions are maintained by release manager - do not update them manually versions: + - 5.7.2 - 5.7.1 - 5.7.0 - 5.6.3 diff --git a/airflow/providers/openlineage/CHANGELOG.rst b/airflow/providers/openlineage/CHANGELOG.rst index 0e35dab6deaa2..05c20a241e624 100644 --- a/airflow/providers/openlineage/CHANGELOG.rst +++ b/airflow/providers/openlineage/CHANGELOG.rst @@ -26,6 +26,20 @@ Changelog --------- +1.12.2 +...... + +Misc +~~~~ + +* ``Change imports to use Standard provider for BashOperator (#42252)`` +* ``Drop python3.8 support core and providers (#42766)`` +* ``Rename dataset related python variable names to asset (#41348)`` + + +.. Below changes are excluded from the changelog. Move them to + appropriate section above if needed. Do not delete the lines(!): + 1.12.1 ...... 
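The ``Change imports to use Standard provider for BashOperator (#42252)`` entry, which appears in both the ``celery`` and ``openlineage`` changelogs in this patch, boils down to an import-path move. A minimal sketch, assuming the ``standard`` provider is installed and exposes the path below:

.. code-block:: python

    import datetime

    from airflow import DAG

    # Old import, from Airflow core:
    # from airflow.operators.bash import BashOperator

    # New import, from the standard provider (assumed path, matching the
    # direction of change described in #42252):
    from airflow.providers.standard.operators.bash import BashOperator

    with DAG(
        dag_id="bash_import_example",
        start_date=datetime.datetime(2024, 10, 1),
        schedule=None,
    ):
        BashOperator(task_id="print_date", bash_command="date")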
diff --git a/airflow/providers/openlineage/__init__.py b/airflow/providers/openlineage/__init__.py index 664e5530ebf97..676fae32d2594 100644 --- a/airflow/providers/openlineage/__init__.py +++ b/airflow/providers/openlineage/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "1.12.1" +__version__ = "1.12.2" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/openlineage/provider.yaml b/airflow/providers/openlineage/provider.yaml index 5b08ac6a77ae7..fadaf9c1e07a4 100644 --- a/airflow/providers/openlineage/provider.yaml +++ b/airflow/providers/openlineage/provider.yaml @@ -22,9 +22,10 @@ description: | `OpenLineage `__ state: ready -source-date-epoch: 1727372276 +source-date-epoch: 1728485291 # note that those versions are maintained by release manager - do not update them manually versions: + - 1.12.2 - 1.12.1 - 1.12.0 - 1.11.0 diff --git a/airflow/providers/opensearch/CHANGELOG.rst b/airflow/providers/opensearch/CHANGELOG.rst index 4bc3235a79cf8..964a44f213d43 100644 --- a/airflow/providers/opensearch/CHANGELOG.rst +++ b/airflow/providers/opensearch/CHANGELOG.rst @@ -27,6 +27,28 @@ Changelog --------- +1.5.0 +..... + +Features +~~~~~~~~ + +* ``(feat): Add opensearch logging integration (#41799)`` + +Bug Fixes +~~~~~~~~~ + +* ``Handle empty login and password with opensearch client (#39982)`` + +Misc +~~~~ + +* ``Removed conditional check for task context logging in airflow version 2.8.0 and above (#42764)`` + + +.. Below changes are excluded from the changelog. Move them to + appropriate section above if needed. Do not delete the lines(!): + 1.4.0 ..... diff --git a/airflow/providers/opensearch/__init__.py b/airflow/providers/opensearch/__init__.py index b98bfd8b5a51a..457d4fdeaaf70 100644 --- a/airflow/providers/opensearch/__init__.py +++ b/airflow/providers/opensearch/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "1.4.0" +__version__ = "1.5.0" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/opensearch/provider.yaml b/airflow/providers/opensearch/provider.yaml index 9cfea50540911..e5b3027ebef67 100644 --- a/airflow/providers/opensearch/provider.yaml +++ b/airflow/providers/opensearch/provider.yaml @@ -22,9 +22,10 @@ description: | `OpenSearch `__ state: ready -source-date-epoch: 1723970482 +source-date-epoch: 1728485323 # note that those versions are maintained by release manager - do not update them manually versions: + - 1.5.0 - 1.4.0 - 1.3.0 - 1.2.1 diff --git a/airflow/providers/postgres/CHANGELOG.rst b/airflow/providers/postgres/CHANGELOG.rst index 8c5b65d3e32ed..b1a42da50d395 100644 --- a/airflow/providers/postgres/CHANGELOG.rst +++ b/airflow/providers/postgres/CHANGELOG.rst @@ -27,6 +27,18 @@ Changelog --------- +5.13.1 +...... + +Misc +~~~~ + +* ``Rename dataset related python variable names to asset (#41348)`` + + +.. Below changes are excluded from the changelog. Move them to + appropriate section above if needed. Do not delete the lines(!): + 5.13.0 ...... 
diff --git a/airflow/providers/postgres/__init__.py b/airflow/providers/postgres/__init__.py index 5d86bd461a93a..fe91d927f8fd7 100644 --- a/airflow/providers/postgres/__init__.py +++ b/airflow/providers/postgres/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "5.13.0" +__version__ = "5.13.1" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/postgres/provider.yaml b/airflow/providers/postgres/provider.yaml index edbbaeb1da2c1..51332b9b0feaa 100644 --- a/airflow/providers/postgres/provider.yaml +++ b/airflow/providers/postgres/provider.yaml @@ -22,9 +22,10 @@ description: | `PostgreSQL `__ state: ready -source-date-epoch: 1726861120 +source-date-epoch: 1728485329 # note that those versions are maintained by release manager - do not update them manually versions: + - 5.13.1 - 5.13.0 - 5.12.0 - 5.11.3 diff --git a/airflow/providers/snowflake/CHANGELOG.rst b/airflow/providers/snowflake/CHANGELOG.rst index 1d5712b5d5be2..06554edf213e7 100644 --- a/airflow/providers/snowflake/CHANGELOG.rst +++ b/airflow/providers/snowflake/CHANGELOG.rst @@ -27,6 +27,24 @@ Changelog --------- +5.8.0 +..... + +Features +~~~~~~~~ + +* ``Add Snowpark operator and decorator (#42457)`` + +Bug Fixes +~~~~~~~~~ + +* ``fix: SnowflakeSqlApiOperator not resolving parameters in SQL (#42719)`` +* ``Make 'private_key_content' a sensitive field in Snowflake connection (#42649)`` + + +.. Below changes are excluded from the changelog. Move them to + appropriate section above if needed. Do not delete the lines(!): + 5.7.1 ..... diff --git a/airflow/providers/snowflake/__init__.py b/airflow/providers/snowflake/__init__.py index 607ebb12fbff5..9a4e264521c73 100644 --- a/airflow/providers/snowflake/__init__.py +++ b/airflow/providers/snowflake/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "5.7.1" +__version__ = "5.8.0" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/snowflake/provider.yaml b/airflow/providers/snowflake/provider.yaml index 47de902ff65da..08a2ce861d477 100644 --- a/airflow/providers/snowflake/provider.yaml +++ b/airflow/providers/snowflake/provider.yaml @@ -22,9 +22,10 @@ description: | `Snowflake `__ state: ready -source-date-epoch: 1726861185 +source-date-epoch: 1728485406 # note that those versions are maintained by release manager - do not update them manually versions: + - 5.8.0 - 5.7.1 - 5.7.0 - 5.6.1 diff --git a/airflow/providers/trino/CHANGELOG.rst b/airflow/providers/trino/CHANGELOG.rst index a0f32bc76a3c5..09c2984477541 100644 --- a/airflow/providers/trino/CHANGELOG.rst +++ b/airflow/providers/trino/CHANGELOG.rst @@ -27,6 +27,18 @@ Changelog --------- +5.8.1 +..... + +Misc +~~~~ + +* ``Rename dataset related python variable names to asset (#41348)`` + + +.. Below changes are excluded from the changelog. Move them to + appropriate section above if needed. Do not delete the lines(!): + 5.8.0 ..... 
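``Add Snowpark operator and decorator (#42457)`` in the ``snowflake`` 5.8.0 notes above introduces a TaskFlow-style entry point. A hedged sketch: the ``@task.snowpark`` spelling and the injected ``session`` argument are assumptions based on the feature's description, not code from this patch:

.. code-block:: python

    import pendulum

    from airflow.decorators import dag, task


    @dag(schedule=None, start_date=pendulum.datetime(2024, 10, 1), catchup=False)
    def snowpark_example():
        @task.snowpark(snowflake_conn_id="snowflake_default")
        def count_rows(session):
            # ``session`` is assumed to be a Snowpark Session injected by the
            # decorator; the body is plain Snowpark API usage.
            return session.table("SAMPLE_TABLE").count()

        count_rows()


    snowpark_example()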
diff --git a/airflow/providers/trino/__init__.py b/airflow/providers/trino/__init__.py index e729510404c11..a675398f4f562 100644 --- a/airflow/providers/trino/__init__.py +++ b/airflow/providers/trino/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "5.8.0" +__version__ = "5.8.1" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/trino/provider.yaml b/airflow/providers/trino/provider.yaml index 424be2cca67d9..876244aa1c6a8 100644 --- a/airflow/providers/trino/provider.yaml +++ b/airflow/providers/trino/provider.yaml @@ -22,9 +22,10 @@ description: | `Trino `__ state: ready -source-date-epoch: 1723970622 +source-date-epoch: 1728485413 # note that those versions are maintained by release manager - do not update them manually versions: + - 5.8.1 - 5.8.0 - 5.7.2 - 5.7.1 diff --git a/airflow/providers/ydb/CHANGELOG.rst b/airflow/providers/ydb/CHANGELOG.rst index a6ee9aa6b104e..afb2845e15175 100644 --- a/airflow/providers/ydb/CHANGELOG.rst +++ b/airflow/providers/ydb/CHANGELOG.rst @@ -27,6 +27,18 @@ Changelog --------- +1.4.0 +..... + +Features +~~~~~~~~ + +* ``Add an ability to use scan queries via new YDB operator (#42311)`` + + +.. Below changes are excluded from the changelog. Move them to + appropriate section above if needed. Do not delete the lines(!): + 1.3.0 ..... diff --git a/airflow/providers/ydb/__init__.py b/airflow/providers/ydb/__init__.py index b7b03b98626b7..193a62740ada4 100644 --- a/airflow/providers/ydb/__init__.py +++ b/airflow/providers/ydb/__init__.py @@ -29,7 +29,7 @@ __all__ = ["__version__"] -__version__ = "1.3.0" +__version__ = "1.4.0" if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse( "2.8.0" diff --git a/airflow/providers/ydb/provider.yaml b/airflow/providers/ydb/provider.yaml index a956100dc3f67..18f7140e1aa1c 100644 --- a/airflow/providers/ydb/provider.yaml +++ b/airflow/providers/ydb/provider.yaml @@ -22,9 +22,10 @@ description: | `YDB `__ state: ready -source-date-epoch: 1723970719 +source-date-epoch: 1728485455 # note that those versions are maintained by release manager - do not update them manually versions: + - 1.4.0 - 1.3.0 - 1.2.0 - 1.1.0 diff --git a/docs/apache-airflow-providers-amazon/commits.rst b/docs/apache-airflow-providers-amazon/commits.rst index 309c2b5989fcd..07aa8f84e0aee 100644 --- a/docs/apache-airflow-providers-amazon/commits.rst +++ b/docs/apache-airflow-providers-amazon/commits.rst @@ -35,14 +35,40 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-10-09 ``Revert "Remove 'sqlalchemy-redshift' dependency from Amazon provider (#42830)" (#42864)`` +`3b4c73a644 `_ 2024-10-08 ``Remove 'sqlalchemy-redshift' dependency from Amazon provider (#42830)`` +`63ff22f403 `_ 2024-10-08 ``Drop python3.8 support core and providers (#42766)`` +`1cb9294c64 `_ 2024-10-06 ``Removed conditional check for task context logging in airflow version 2.8.0 and above (#42764)`` +`ede7cb27fd `_ 2024-09-30 ``Rename dataset related python variable names to asset (#41348)`` +`568162263d `_ 2024-09-26 ``Remove deprecated stuff from Amazon provider package (#42450)`` +`ac0711f0f1 `_ 2024-09-26 ``'S3DeleteObjects' Operator: Handle dates passed as strings (#42464)`` +`f6852c2c55 `_ 2024-09-25 ``Remove identity center auth manager cli (#42481)`` +`663da777f9 `_ 2024-09-25 ``Small fix to AWS AVP cli init script (#42479)`` +`18c54bcb42 `_ 2024-09-25 ``#42442 Make the AWS 
logging faster by reducing the amount of sleep (#42449)`` +`d87f9b0505 `_ 2024-09-25 ``Refactor AWS Auth manager user output (#42454)`` +`ab3429c318 `_ 2024-09-24 ``Add STOPPED to the failure cases for Sagemaker Training Jobs (#42423)`` +`8580e6d046 `_ 2024-09-24 ``Support session reuse in 'RedshiftDataOperator' (#42218)`` +`4c8c72f4a4 `_ 2024-09-24 ``Fix logout in AWS auth manager (#42447)`` +`b9629d99b2 `_ 2024-09-24 ``fix(providers/amazon): handle ClientError raised after key is missing during table.get_item (#42408)`` +================================================================================================= =========== ======================================================================================================== + 8.29.0 ...... -Latest change: 2024-09-16 +Latest change: 2024-09-21 ================================================================================================= =========== ================================================================================================= Commit Committed Subject ================================================================================================= =========== ================================================================================================= +`7628d47d04 `_ 2024-09-21 ``Prepare docs for Sep 1st wave of providers (#42387)`` `4afc2569b1 `_ 2024-09-16 ``ECSExecutor: Drop params that aren't compatible with EC2 (#42228)`` `5fb0d65aaf `_ 2024-09-11 ``Adding support for volume configurations in ECSRunTaskOperator (#42087)`` `1bde32ab7b `_ 2024-09-11 ``Actually move saml to amazon provider (mistakenly added in papermill) (#42148)`` diff --git a/docs/apache-airflow-providers-amazon/index.rst b/docs/apache-airflow-providers-amazon/index.rst index c58847dcd8876..6d354b1a5f894 100644 --- a/docs/apache-airflow-providers-amazon/index.rst +++ b/docs/apache-airflow-providers-amazon/index.rst @@ -86,7 +86,7 @@ apache-airflow-providers-amazon package Amazon integration (including `Amazon Web Services (AWS) `__). -Release: 8.29.0 +Release: 9.0.0 Provider package ---------------- @@ -164,5 +164,5 @@ Downloading official packages You can download officially released packages and verify their checksums and signatures from the `Official Apache Download site `_ -* `The apache-airflow-providers-amazon 8.29.0 sdist package `_ (`asc `__, `sha512 `__) -* `The apache-airflow-providers-amazon 8.29.0 wheel package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-amazon 9.0.0 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-amazon 9.0.0 wheel package `_ (`asc `__, `sha512 `__) diff --git a/docs/apache-airflow-providers-apache-beam/commits.rst b/docs/apache-airflow-providers-apache-beam/commits.rst index 0173d7bbf59fa..961ece4142294 100644 --- a/docs/apache-airflow-providers-apache-beam/commits.rst +++ b/docs/apache-airflow-providers-apache-beam/commits.rst @@ -35,14 +35,26 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-09-24 ``Bugfix/dataflow job location passing (#41887)`` +================================================================================================= =========== ================================================= + 5.8.0 ..... 
-Latest change: 2024-08-12 +Latest change: 2024-08-19 ================================================================================================= =========== ======================================================================= Commit Committed Subject ================================================================================================= =========== ======================================================================= +`75fb7acbac `_ 2024-08-19 ``Prepare docs for Aug 2nd wave of providers (#41559)`` `fcbff15bda `_ 2024-08-12 ``Bump minimum Airflow version in providers to Airflow 2.8.0 (#41396)`` ================================================================================================= =========== ======================================================================= diff --git a/docs/apache-airflow-providers-apache-beam/index.rst b/docs/apache-airflow-providers-apache-beam/index.rst index 0934dc3cae5d7..a8b7396b6706e 100644 --- a/docs/apache-airflow-providers-apache-beam/index.rst +++ b/docs/apache-airflow-providers-apache-beam/index.rst @@ -75,7 +75,7 @@ apache-airflow-providers-apache-beam package `Apache Beam `__. -Release: 5.8.0 +Release: 5.8.1 Provider package ---------------- @@ -128,5 +128,5 @@ Downloading official packages You can download officially released packages and verify their checksums and signatures from the `Official Apache Download site `_ -* `The apache-airflow-providers-apache-beam 5.8.0 sdist package `_ (`asc `__, `sha512 `__) -* `The apache-airflow-providers-apache-beam 5.8.0 wheel package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-apache-beam 5.8.1 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-apache-beam 5.8.1 wheel package `_ (`asc `__, `sha512 `__) diff --git a/docs/apache-airflow-providers-apache-kafka/commits.rst b/docs/apache-airflow-providers-apache-kafka/commits.rst index 8aee39b02018d..7fba0473facb4 100644 --- a/docs/apache-airflow-providers-apache-kafka/commits.rst +++ b/docs/apache-airflow-providers-apache-kafka/commits.rst @@ -35,14 +35,26 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-09-29 ``remove callable functions parameter from kafka operator template_fields (#42555)`` +================================================================================================= =========== ==================================================================================== + 1.6.0 ..... 
-Latest change: 2024-08-12 +Latest change: 2024-08-19 ================================================================================================= =========== ======================================================================= Commit Committed Subject ================================================================================================= =========== ======================================================================= +`75fb7acbac `_ 2024-08-19 ``Prepare docs for Aug 2nd wave of providers (#41559)`` `fcbff15bda `_ 2024-08-12 ``Bump minimum Airflow version in providers to Airflow 2.8.0 (#41396)`` `d23881c648 `_ 2024-08-03 ``Prepare docs for Aug 1st wave of providers (#41230)`` `09a7bd1d58 `_ 2024-07-09 ``Prepare docs 1st wave July 2024 (#40644)`` diff --git a/docs/apache-airflow-providers-apache-kafka/index.rst b/docs/apache-airflow-providers-apache-kafka/index.rst index f735b162f4939..0879b57fb7f1b 100644 --- a/docs/apache-airflow-providers-apache-kafka/index.rst +++ b/docs/apache-airflow-providers-apache-kafka/index.rst @@ -82,7 +82,7 @@ apache-airflow-providers-apache-kafka package `Apache Kafka `__ -Release: 1.6.0 +Release: 1.6.1 Provider package ---------------- diff --git a/docs/apache-airflow-providers-apache-spark/commits.rst b/docs/apache-airflow-providers-apache-spark/commits.rst index 674f50007190f..45a324a63262f 100644 --- a/docs/apache-airflow-providers-apache-spark/commits.rst +++ b/docs/apache-airflow-providers-apache-spark/commits.rst @@ -35,14 +35,26 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-10-07 ``The function resolve_kerberos_principal updated when airflow version 2.8 and above (#42777)`` +================================================================================================= =========== =============================================================================================== + 4.11.0 ...... 
-Latest change: 2024-09-17 +Latest change: 2024-09-21 ================================================================================================= =========== ========================================================================================= Commit Committed Subject ================================================================================================= =========== ========================================================================================= +`7628d47d04 `_ 2024-09-21 ``Prepare docs for Sep 1st wave of providers (#42387)`` `1f10532163 `_ 2024-09-17 ``Add kerberos related connection fields(principal, keytab) on SparkSubmitHook (#40757)`` ================================================================================================= =========== ========================================================================================= diff --git a/docs/apache-airflow-providers-apache-spark/index.rst b/docs/apache-airflow-providers-apache-spark/index.rst index db6da3392787e..6e36428a46291 100644 --- a/docs/apache-airflow-providers-apache-spark/index.rst +++ b/docs/apache-airflow-providers-apache-spark/index.rst @@ -77,7 +77,7 @@ apache-airflow-providers-apache-spark package `Apache Spark `__ -Release: 4.11.0 +Release: 4.11.1 Provider package ---------------- @@ -130,5 +130,5 @@ Downloading official packages You can download officially released packages and verify their checksums and signatures from the `Official Apache Download site `_ -* `The apache-airflow-providers-apache-spark 4.11.0 sdist package `_ (`asc `__, `sha512 `__) -* `The apache-airflow-providers-apache-spark 4.11.0 wheel package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-apache-spark 4.11.1 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-apache-spark 4.11.1 wheel package `_ (`asc `__, `sha512 `__) diff --git a/docs/apache-airflow-providers-celery/commits.rst b/docs/apache-airflow-providers-celery/commits.rst index 115438b7e7e45..302e845f4d138 100644 --- a/docs/apache-airflow-providers-celery/commits.rst +++ b/docs/apache-airflow-providers-celery/commits.rst @@ -35,16 +35,30 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-10-09 ``Standard provider bash operator (#42252)`` +`c72dad7eaf `_ 2024-10-01 ``All executors should inherit from BaseExecutor (#41904)`` +`f463e2a0dd `_ 2024-10-01 ``Remove state sync during celery task processing (#41870)`` +================================================================================================= =========== ============================================================ + 3.8.2 ..... 
-Latest change: 2024-08-25 +Latest change: 2024-09-21 -================================================================================================= =========== ============================= +================================================================================================= =========== ======================================================= Commit Committed Subject -================================================================================================= =========== ============================= +================================================================================================= =========== ======================================================= +`7628d47d04 `_ 2024-09-21 ``Prepare docs for Sep 1st wave of providers (#42387)`` `1613e9ec1c `_ 2024-08-25 ``remove soft_fail (#41710)`` -================================================================================================= =========== ============================= +================================================================================================= =========== ======================================================= 3.8.1 ..... diff --git a/docs/apache-airflow-providers-celery/index.rst b/docs/apache-airflow-providers-celery/index.rst index 85117b0d1d656..151a35d71531a 100644 --- a/docs/apache-airflow-providers-celery/index.rst +++ b/docs/apache-airflow-providers-celery/index.rst @@ -67,7 +67,7 @@ apache-airflow-providers-celery package `Celery `__ -Release: 3.8.2 +Release: 3.8.3 Provider package ---------------- @@ -113,6 +113,7 @@ You can install such cross-provider dependencies when installing from PyPI. For Dependent package Extra ====================================================================================================================== =================== `apache-airflow-providers-cncf-kubernetes `_ ``cncf.kubernetes`` +`apache-airflow-providers-standard `_ ``standard`` ====================================================================================================================== =================== Downloading official packages @@ -121,5 +122,5 @@ Downloading official packages You can download officially released packages and verify their checksums and signatures from the `Official Apache Download site `_ -* `The apache-airflow-providers-celery 3.8.2 sdist package `_ (`asc `__, `sha512 `__) -* `The apache-airflow-providers-celery 3.8.2 wheel package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-celery 3.8.3 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-celery 3.8.3 wheel package `_ (`asc `__, `sha512 `__) diff --git a/docs/apache-airflow-providers-cloudant/commits.rst b/docs/apache-airflow-providers-cloudant/commits.rst index 5e36a53ba7cc8..bc4a00bc4e880 100644 --- a/docs/apache-airflow-providers-cloudant/commits.rst +++ b/docs/apache-airflow-providers-cloudant/commits.rst @@ -35,14 +35,26 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-10-08 ``Drop python3.8 support core and providers (#42766)`` +================================================================================================= =========== ====================================================== + 4.0.0 ..... 
-Latest change: 2024-09-06 +Latest change: 2024-09-21 ================================================================================================= =========== ================================================================================== Commit Committed Subject ================================================================================================= =========== ================================================================================== +`7628d47d04 `_ 2024-09-21 ``Prepare docs for Sep 1st wave of providers (#42387)`` `caa94fa466 `_ 2024-09-06 ``Switch cloudant provider from cloudant library to ibmcloudant library (#41555)`` ================================================================================================= =========== ================================================================================== diff --git a/docs/apache-airflow-providers-cloudant/index.rst b/docs/apache-airflow-providers-cloudant/index.rst index 28d76f291d61d..2de0afb0a7f70 100644 --- a/docs/apache-airflow-providers-cloudant/index.rst +++ b/docs/apache-airflow-providers-cloudant/index.rst @@ -55,7 +55,7 @@ apache-airflow-providers-cloudant package `IBM Cloudant `__ -Release: 4.0.0 +Release: 4.0.1 Provider package ---------------- diff --git a/docs/apache-airflow-providers-cncf-kubernetes/commits.rst b/docs/apache-airflow-providers-cncf-kubernetes/commits.rst index 56bd24e26a786..99dcb3a74a032 100644 --- a/docs/apache-airflow-providers-cncf-kubernetes/commits.rst +++ b/docs/apache-airflow-providers-cncf-kubernetes/commits.rst @@ -35,14 +35,34 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-10-09 ``Fix mark as success when pod fails while fetching log (#42815)`` +`63ff22f403 `_ 2024-10-08 ``Drop python3.8 support core and providers (#42766)`` +`e5a474bdd1 `_ 2024-10-07 ``kubernetes executor cleanup_stuck_queued_tasks optimization (#41220)`` +`24b7c5b151 `_ 2024-10-04 ``Remove airflow_version from k8s executor pod selector (#42751)`` +`0120515f6c `_ 2024-10-03 ``Added unit tests and restructred 'await_xcom_sidecar_container_start' method. (#42504)`` +`c72dad7eaf `_ 2024-10-01 ``All executors should inherit from BaseExecutor (#41904)`` +`6808619ec5 `_ 2024-09-30 ``KubernetesHook kube_config extra can take dict (#41413)`` +`223acdb13f `_ 2024-09-27 ``Fix SparkKubernetesOperator spark name. (#42427)`` +`7782050201 `_ 2024-09-27 ``KubernetesPodOperator never stops if credentials are refreshed (#42361)`` +================================================================================================= =========== ========================================================================================== + 8.4.2 ..... 
-Latest change: 2024-09-16 +Latest change: 2024-09-21 ================================================================================================= =========== ================================================================ Commit Committed Subject ================================================================================================= =========== ================================================================ +`7628d47d04 `_ 2024-09-21 ``Prepare docs for Sep 1st wave of providers (#42387)`` `d1e500c450 `_ 2024-09-16 ``Deprecated configuration removed (#42129)`` `1379376b66 `_ 2024-09-02 ``Add TODOs in providers code for Subdag code removal (#41963)`` `86af316c87 `_ 2024-09-02 ``docstring update (#41929)`` diff --git a/docs/apache-airflow-providers-cncf-kubernetes/index.rst b/docs/apache-airflow-providers-cncf-kubernetes/index.rst index fbf78d5bd3538..1db81c09a3f6d 100644 --- a/docs/apache-airflow-providers-cncf-kubernetes/index.rst +++ b/docs/apache-airflow-providers-cncf-kubernetes/index.rst @@ -87,7 +87,7 @@ apache-airflow-providers-cncf-kubernetes package `Kubernetes `__ -Release: 8.4.2 +Release: 9.0.0 Provider package ---------------- diff --git a/docs/apache-airflow-providers-common-compat/commits.rst b/docs/apache-airflow-providers-common-compat/commits.rst index 5349314f57e64..d56bddfc128c3 100644 --- a/docs/apache-airflow-providers-common-compat/commits.rst +++ b/docs/apache-airflow-providers-common-compat/commits.rst @@ -35,14 +35,26 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-09-30 ``Rename dataset related python variable names to asset (#41348)`` +================================================================================================= =========== ================================================================== + 1.2.0 ..... -Latest change: 2024-08-12 +Latest change: 2024-08-19 ================================================================================================= =========== ======================================================================= Commit Committed Subject ================================================================================================= =========== ======================================================================= +`75fb7acbac `_ 2024-08-19 ``Prepare docs for Aug 2nd wave of providers (#41559)`` `fcbff15bda `_ 2024-08-12 ``Bump minimum Airflow version in providers to Airflow 2.8.0 (#41396)`` ================================================================================================= =========== ======================================================================= diff --git a/docs/apache-airflow-providers-common-compat/index.rst b/docs/apache-airflow-providers-common-compat/index.rst index c03a05947bbc0..b5eadbddb0905 100644 --- a/docs/apache-airflow-providers-common-compat/index.rst +++ b/docs/apache-airflow-providers-common-compat/index.rst @@ -62,7 +62,7 @@ apache-airflow-providers-common-compat package ``Common Compatibility Provider - providing compatibility code for previous Airflow versions.`` -Release: 1.2.0 +Release: 1.2.1 Provider package ---------------- @@ -87,3 +87,31 @@ PIP package Version required ================== ================== ``apache-airflow`` ``>=2.8.0`` ================== ================== + +Cross provider package dependencies +----------------------------------- + +Those are dependencies that might be needed in order to use all the features of the package. +You need to install the specified provider packages in order to use them. 
+ +You can install such cross-provider dependencies when installing from PyPI. For example: + +.. code-block:: bash + + pip install apache-airflow-providers-common-compat[openlineage] + + +============================================================================================================== =============== +Dependent package Extra +============================================================================================================== =============== +`apache-airflow-providers-openlineage `_ ``openlineage`` +============================================================================================================== =============== + +Downloading official packages +----------------------------- + +You can download officially released packages and verify their checksums and signatures from the +`Official Apache Download site `_ + +* `The apache-airflow-providers-common-compat 1.2.1 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-common-compat 1.2.1 wheel package `_ (`asc `__, `sha512 `__) diff --git a/docs/apache-airflow-providers-common-io/commits.rst b/docs/apache-airflow-providers-common-io/commits.rst index e2d5fef0daeaa..29a3814ae384b 100644 --- a/docs/apache-airflow-providers-common-io/commits.rst +++ b/docs/apache-airflow-providers-common-io/commits.rst @@ -35,14 +35,27 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-10-08 ``Drop python3.8 support core and providers (#42766)`` +`ede7cb27fd `_ 2024-09-30 ``Rename dataset related python variable names to asset (#41348)`` +================================================================================================= =========== ================================================================== + 1.4.1 ..... -Latest change: 2024-09-01 +Latest change: 2024-09-21 ================================================================================================= =========== ============================================================================== Commit Committed Subject ================================================================================================= =========== ============================================================================== +`7628d47d04 `_ 2024-09-21 ``Prepare docs for Sep 1st wave of providers (#42387)`` `7a75f0a2bc `_ 2024-09-01 ``Protect against None components of universal pathlib xcom backend (#41921)`` ================================================================================================= =========== ============================================================================== diff --git a/docs/apache-airflow-providers-common-io/index.rst b/docs/apache-airflow-providers-common-io/index.rst index 9a3468d397d8d..606b6c5161bcd 100644 --- a/docs/apache-airflow-providers-common-io/index.rst +++ b/docs/apache-airflow-providers-common-io/index.rst @@ -79,7 +79,7 @@ apache-airflow-providers-common-io package ``Common IO Provider`` -Release: 1.4.1 +Release: 1.4.2 Provider package ---------------- @@ -131,5 +131,5 @@ Downloading official packages You can download officially released packages and verify their checksums and signatures from the `Official Apache Download site `_ -* `The apache-airflow-providers-common-io 1.4.1 sdist package `_ (`asc `__, `sha512 `__) -* `The apache-airflow-providers-common-io 1.4.1 wheel package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-common-io 1.4.2 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-common-io 1.4.2 wheel package `_ (`asc `__, 
`sha512 `__) diff --git a/docs/apache-airflow-providers-common-sql/commits.rst b/docs/apache-airflow-providers-common-sql/commits.rst index f719dd7b39811..a84e298074482 100644 --- a/docs/apache-airflow-providers-common-sql/commits.rst +++ b/docs/apache-airflow-providers-common-sql/commits.rst @@ -35,14 +35,27 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-10-07 ``FIX: Only pass connection to sqlalchemy engine in JdbcHook (#42705)`` +`5973c9cce1 `_ 2024-10-04 ``feat(providers/common/sql): add warning to connection setter (#42736)`` +================================================================================================= =========== ========================================================================= + 1.17.1 ...... -Latest change: 2024-09-26 +Latest change: 2024-09-27 ================================================================================================= =========== ============================================================================================== Commit Committed Subject ================================================================================================= =========== ============================================================================================== +`bcee926d49 `_ 2024-09-27 ``Prepare docs for Sep 2nd adhoc wave of providers (#42519)`` `7ad586ed6a `_ 2024-09-26 ``fix(providers/common/sql): add dummy connection setter for backward compatibility (#42490)`` `47c71108a8 `_ 2024-09-22 ``Changed type hinting for handler function (#42275)`` ================================================================================================= =========== ============================================================================================== diff --git a/docs/apache-airflow-providers-common-sql/index.rst b/docs/apache-airflow-providers-common-sql/index.rst index 573603b2d7d04..f764b57a8b680 100644 --- a/docs/apache-airflow-providers-common-sql/index.rst +++ b/docs/apache-airflow-providers-common-sql/index.rst @@ -77,7 +77,7 @@ apache-airflow-providers-common-sql package `Common SQL Provider `__ -Release: 1.17.1 +Release: 1.18.0 Provider package ---------------- @@ -130,5 +130,5 @@ Downloading official packages You can download officially released packages and verify their checksums and signatures from the `Official Apache Download site `_ -* `The apache-airflow-providers-common-sql 1.17.1 sdist package `_ (`asc `__, `sha512 `__) -* `The apache-airflow-providers-common-sql 1.17.1 wheel package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-common-sql 1.18.0 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-common-sql 1.18.0 wheel package `_ (`asc `__, `sha512 `__) diff --git a/docs/apache-airflow-providers-databricks/commits.rst b/docs/apache-airflow-providers-databricks/commits.rst index c053936f0ba31..3ead2b2ce6cca 100644 --- a/docs/apache-airflow-providers-databricks/commits.rst +++ b/docs/apache-airflow-providers-databricks/commits.rst @@ -35,14 +35,28 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-10-08 ``add warning log when task_key>100 (#42813)`` +`5d51beee35 `_ 2024-10-03 ``Add 'on_kill' to Databricks Workflow Operator (#42115)`` +`9b90d2f216 `_ 2024-10-03 ``Add debug logs to print Request/Response data in Databricks provider (#42662)`` +================================================================================================= =========== 
================================================================================== + 6.10.0 ...... -Latest change: 2024-08-30 +Latest change: 2024-09-21 ================================================================================================= =========== ============================================================================================================== Commit Committed Subject ================================================================================================= =========== ============================================================================================================== +`7628d47d04 `_ 2024-09-21 ``Prepare docs for Sep 1st wave of providers (#42387)`` `365b42f5a1 `_ 2024-08-30 ``[FEAT] databricks repair run with reason match and appropriate new settings (#41412)`` `8765039214 `_ 2024-08-27 ``Removed deprecated method referance airflow.www.auth.has_access when min airflow version >= 2.8.0 (#41747)`` `1613e9ec1c `_ 2024-08-25 ``remove soft_fail (#41710)`` diff --git a/docs/apache-airflow-providers-databricks/index.rst b/docs/apache-airflow-providers-databricks/index.rst index d7406fdaacd42..5a124818ea4dc 100644 --- a/docs/apache-airflow-providers-databricks/index.rst +++ b/docs/apache-airflow-providers-databricks/index.rst @@ -78,7 +78,7 @@ apache-airflow-providers-databricks package `Databricks `__ -Release: 6.10.0 +Release: 6.11.0 Provider package ---------------- @@ -137,5 +137,5 @@ Downloading official packages You can download officially released packages and verify their checksums and signatures from the `Official Apache Download site `_ -* `The apache-airflow-providers-databricks 6.10.0 sdist package `_ (`asc `__, `sha512 `__) -* `The apache-airflow-providers-databricks 6.10.0 wheel package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-databricks 6.11.0 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-databricks 6.11.0 wheel package `_ (`asc `__, `sha512 `__) diff --git a/docs/apache-airflow-providers-dbt-cloud/commits.rst b/docs/apache-airflow-providers-dbt-cloud/commits.rst index 6effec6f41cf9..276f3092e534d 100644 --- a/docs/apache-airflow-providers-dbt-cloud/commits.rst +++ b/docs/apache-airflow-providers-dbt-cloud/commits.rst @@ -35,16 +35,29 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-10-08 ``Simplify code for recent dbt provider change (#42840)`` +`c57be2ffc5 `_ 2024-10-08 ``Add ability to provide proxy for dbt Cloud connection (#42737)`` +================================================================================================= =========== ================================================================== + 3.10.1 ...... 
-Latest change: 2024-08-25 +Latest change: 2024-09-21 -================================================================================================= =========== ============================= +================================================================================================= =========== ======================================================= Commit Committed Subject -================================================================================================= =========== ============================= +================================================================================================= =========== ======================================================= +`7628d47d04 `_ 2024-09-21 ``Prepare docs for Sep 1st wave of providers (#42387)`` `1613e9ec1c `_ 2024-08-25 ``remove soft_fail (#41710)`` -================================================================================================= =========== ============================= +================================================================================================= =========== ======================================================= 3.10.0 ...... diff --git a/docs/apache-airflow-providers-dbt-cloud/index.rst b/docs/apache-airflow-providers-dbt-cloud/index.rst index 47e64ff933661..f9f214af8323f 100644 --- a/docs/apache-airflow-providers-dbt-cloud/index.rst +++ b/docs/apache-airflow-providers-dbt-cloud/index.rst @@ -81,7 +81,7 @@ apache-airflow-providers-dbt-cloud package `dbt Cloud `__ -Release: 3.10.1 +Release: 3.11.0 Provider package ---------------- @@ -136,5 +136,5 @@ Downloading official packages You can download officially released packages and verify their checksums and signatures from the `Official Apache Download site `_ -* `The apache-airflow-providers-dbt-cloud 3.10.1 sdist package `_ (`asc `__, `sha512 `__) -* `The apache-airflow-providers-dbt-cloud 3.10.1 wheel package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-dbt-cloud 3.11.0 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-dbt-cloud 3.11.0 wheel package `_ (`asc `__, `sha512 `__) diff --git a/docs/apache-airflow-providers-elasticsearch/commits.rst b/docs/apache-airflow-providers-elasticsearch/commits.rst index 03b11128209c3..0cbc310101489 100644 --- a/docs/apache-airflow-providers-elasticsearch/commits.rst +++ b/docs/apache-airflow-providers-elasticsearch/commits.rst @@ -35,14 +35,27 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-10-09 ``Fix spelling; 'Airlfow' -> 'Airflow' (#42855)`` +`1cb9294c64 `_ 2024-10-06 ``Removed conditional check for task context logging in airflow version 2.8.0 and above (#42764)`` +================================================================================================= =========== ================================================================================================== + 5.5.1 ..... 
-Latest change: 2024-09-04 +Latest change: 2024-09-21 ================================================================================================= =========== ================================================================================================= Commit Committed Subject ================================================================================================= =========== ================================================================================================= +`7628d47d04 `_ 2024-09-21 ``Prepare docs for Sep 1st wave of providers (#42387)`` `2e813eb87d `_ 2024-09-04 ``Generalize caching of connection in DbApiHook to improve performance (#40751)`` `b1e4f37505 `_ 2024-09-01 ``Fix ElasticSearch SQLClient deprecation warning (#41871)`` `79f6383c38 `_ 2024-08-21 ``filename template arg in providers file task handlers backward compitability support (#41633)`` diff --git a/docs/apache-airflow-providers-elasticsearch/index.rst b/docs/apache-airflow-providers-elasticsearch/index.rst index 91c65403a88e9..727d852d7cf26 100644 --- a/docs/apache-airflow-providers-elasticsearch/index.rst +++ b/docs/apache-airflow-providers-elasticsearch/index.rst @@ -79,7 +79,7 @@ apache-airflow-providers-elasticsearch package `Elasticsearch `__ -Release: 5.5.1 +Release: 5.5.2 Provider package ---------------- @@ -132,5 +132,5 @@ Downloading official packages You can download officially released packages and verify their checksums and signatures from the `Official Apache Download site `_ -* `The apache-airflow-providers-elasticsearch 5.5.1 sdist package `_ (`asc `__, `sha512 `__) -* `The apache-airflow-providers-elasticsearch 5.5.1 wheel package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-elasticsearch 5.5.2 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-elasticsearch 5.5.2 wheel package `_ (`asc `__, `sha512 `__) diff --git a/docs/apache-airflow-providers-fab/commits.rst b/docs/apache-airflow-providers-fab/commits.rst index 10c116ccf268e..3401b626d8f8c 100644 --- a/docs/apache-airflow-providers-fab/commits.rst +++ b/docs/apache-airflow-providers-fab/commits.rst @@ -35,6 +35,19 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-10-01 ``Update Rest API tests to no longer rely on FAB auth manager. Move tests specific to FAB permissions to FAB provider (#42523)`` +`ede7cb27fd `_ 2024-09-30 ``Rename dataset related python variable names to asset (#41348)`` +`2beb6a765d `_ 2024-09-25 ``Simplify expression for get_permitted_dag_ids query (#42484)`` +================================================================================================= =========== ================================================================================================================================ + 1.4.0 ..... 
@@ -43,6 +56,7 @@ Latest change: 2024-09-21 ================================================================================================= =========== =================================================================================== Commit Committed Subject ================================================================================================= =========== =================================================================================== +`7628d47d04 `_ 2024-09-21 ``Prepare docs for Sep 1st wave of providers (#42387)`` `6a527c9fac `_ 2024-09-21 ``Fix pre-commit for auto update of fab migration versions (#42382)`` `8741e9c176 `_ 2024-09-20 ``Handle 'AUTH_ROLE_PUBLIC' in FAB auth manager (#42280)`` `9f167bbc34 `_ 2024-09-19 ``Add FAB migration commands (#41804)`` diff --git a/docs/apache-airflow-providers-fab/index.rst b/docs/apache-airflow-providers-fab/index.rst index 30fd4b5aa224f..14135d0d15eae 100644 --- a/docs/apache-airflow-providers-fab/index.rst +++ b/docs/apache-airflow-providers-fab/index.rst @@ -83,7 +83,7 @@ apache-airflow-providers-fab package `Flask App Builder `__ -Release: 1.4.0 +Release: 1.4.1 Provider package ---------------- @@ -103,13 +103,42 @@ Requirements The minimum Apache Airflow version supported by this provider package is ``2.9.0``. -==================== ================== -PIP package Version required -==================== ================== -``apache-airflow`` ``>=2.9.0`` -``flask`` ``>=2.2,<2.3`` -``flask-appbuilder`` ``==4.5.0`` -``flask-login`` ``>=0.6.2`` -``google-re2`` ``>=1.0`` -``jmespath`` ``>=0.7.0`` -==================== ================== +========================================== ================== +PIP package Version required +========================================== ================== +``apache-airflow`` ``>=2.9.0`` +``apache-airflow-providers-common-compat`` ``>=1.2.0`` +``flask`` ``>=2.2,<2.3`` +``flask-appbuilder`` ``==4.5.0`` +``flask-login`` ``>=0.6.2`` +``google-re2`` ``>=1.0`` +``jmespath`` ``>=0.7.0`` +========================================== ================== + +Cross provider package dependencies +----------------------------------- + +Those are dependencies that might be needed in order to use all the features of the package. +You need to install the specified provider packages in order to use them. + +You can install such cross-provider dependencies when installing from PyPI. For example: + +.. 
code-block:: bash + + pip install apache-airflow-providers-fab[common.compat] + + +================================================================================================================== ================= +Dependent package Extra +================================================================================================================== ================= +`apache-airflow-providers-common-compat `_ ``common.compat`` +================================================================================================================== ================= + +Downloading official packages +----------------------------- + +You can download officially released packages and verify their checksums and signatures from the +`Official Apache Download site `_ + +* `The apache-airflow-providers-fab 1.4.1 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-fab 1.4.1 wheel package `_ (`asc `__, `sha512 `__) diff --git a/docs/apache-airflow-providers-google/commits.rst b/docs/apache-airflow-providers-google/commits.rst index fd42bac7809a6..6148a1cf45cf4 100644 --- a/docs/apache-airflow-providers-google/commits.rst +++ b/docs/apache-airflow-providers-google/commits.rst @@ -42,14 +42,36 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-10-06 ``Removed conditional check for task context logging in airflow version 2.8.0 and above (#42764)`` +`2ffc389574 `_ 2024-10-04 ``Add 'retry_if_resource_not_ready' logic for DataprocCreateClusterOperator and DataprocCreateBatchOperator (#42703)`` +`8a255f3e1e `_ 2024-10-04 ``Publish Dataproc Serverless Batch link after it starts if batch_id was provided (#41153)`` +`9ec21405c0 `_ 2024-10-03 ``fix PubSubAsyncHook in PubsubPullTrigger to use gcp_conn_id (#42671)`` +`64e972c977 `_ 2024-10-02 ``Fix consistent return response from PubSubPullSensor (#42080)`` +`ede7cb27fd `_ 2024-09-30 ``Rename dataset related python variable names to asset (#41348)`` +`5689d64911 `_ 2024-09-27 ``Undo partition exclusion from the table name when splitting a full BigQuery table name (#42541)`` +`dc43d31c5a `_ 2024-09-27 ``Fix gcp text to speech uri fetch (#42309)`` +`56ab422a97 `_ 2024-09-27 ``Deprecate AutoMLBatchPredictOperator and refactor AutoMl system tests (#42260)`` +`b0234cbaf7 `_ 2024-09-27 ``Refactor ''bucket.get_blob'' calls in ''GCSHook'' to handle validation for non-existent objects. (#42474)`` +`eed1d0d254 `_ 2024-09-24 ``Bugfix/dataflow job location passing (#41887)`` +================================================================================================= =========== ====================================================================================================================== + 10.23.0 ....... 
-Latest change: 2024-09-20 +Latest change: 2024-09-21 ================================================================================================= =========== ==================================================================================================================================== Commit Committed Subject ================================================================================================= =========== ==================================================================================================================================== +`7628d47d04 `_ 2024-09-21 ``Prepare docs for Sep 1st wave of providers (#42387)`` `c051d0aa4c `_ 2024-09-20 ``Add ability to create Flink Jobs in dataproc cluster (#42342)`` `f278e62255 `_ 2024-09-19 ``Minor fixes to ensure successful Vertex AI LLMops pipeline (#41997)`` `d7343816b3 `_ 2024-09-19 ``Mark VertexAI AutoMLText deprecation (#42251)`` diff --git a/docs/apache-airflow-providers-google/index.rst b/docs/apache-airflow-providers-google/index.rst index 1a81bb433e04d..05e05725aa17e 100644 --- a/docs/apache-airflow-providers-google/index.rst +++ b/docs/apache-airflow-providers-google/index.rst @@ -87,7 +87,7 @@ Google services including: - `Google Workspace `__ (formerly Google Suite) -Release: 10.23.0 +Release: 10.24.0 Provider package ---------------- @@ -220,5 +220,5 @@ Downloading official packages You can download officially released packages and verify their checksums and signatures from the `Official Apache Download site `_ -* `The apache-airflow-providers-google 10.23.0 sdist package `_ (`asc `__, `sha512 `__) -* `The apache-airflow-providers-google 10.23.0 wheel package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-google 10.24.0 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-google 10.24.0 wheel package `_ (`asc `__, `sha512 `__) diff --git a/docs/apache-airflow-providers-jdbc/commits.rst b/docs/apache-airflow-providers-jdbc/commits.rst index c218e7ccf01cf..21ce7e0081e8a 100644 --- a/docs/apache-airflow-providers-jdbc/commits.rst +++ b/docs/apache-airflow-providers-jdbc/commits.rst @@ -35,14 +35,26 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-10-07 ``FIX: Only pass connection to sqlalchemy engine in JdbcHook (#42705)`` +================================================================================================= =========== ======================================================================= + 4.5.1 ..... 
-Latest change: 2024-09-04 +Latest change: 2024-09-21 ================================================================================================= =========== ================================================================================= Commit Committed Subject ================================================================================================= =========== ================================================================================= +`7628d47d04 `_ 2024-09-21 ``Prepare docs for Sep 1st wave of providers (#42387)`` `2e813eb87d `_ 2024-09-04 ``Generalize caching of connection in DbApiHook to improve performance (#40751)`` ================================================================================================= =========== ================================================================================= diff --git a/docs/apache-airflow-providers-jdbc/index.rst b/docs/apache-airflow-providers-jdbc/index.rst index b5e48c0be3e4d..e31369841cd7c 100644 --- a/docs/apache-airflow-providers-jdbc/index.rst +++ b/docs/apache-airflow-providers-jdbc/index.rst @@ -78,7 +78,7 @@ apache-airflow-providers-jdbc package `Java Database Connectivity (JDBC) `__ -Release: 4.5.1 +Release: 4.5.2 Provider package ---------------- @@ -131,5 +131,5 @@ Downloading official packages You can download officially released packages and verify their checksums and signatures from the `Official Apache Download site `_ -* `The apache-airflow-providers-jdbc 4.5.1 sdist package `_ (`asc `__, `sha512 `__) -* `The apache-airflow-providers-jdbc 4.5.1 wheel package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-jdbc 4.5.2 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-jdbc 4.5.2 wheel package `_ (`asc `__, `sha512 `__) diff --git a/docs/apache-airflow-providers-microsoft-azure/commits.rst b/docs/apache-airflow-providers-microsoft-azure/commits.rst index 88454472e5b43..1f18be632c20b 100644 --- a/docs/apache-airflow-providers-microsoft-azure/commits.rst +++ b/docs/apache-airflow-providers-microsoft-azure/commits.rst @@ -35,14 +35,28 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-10-06 ``Removed conditional check for task context logging in airflow version 2.8.0 and above (#42764)`` +`d89f305a34 `_ 2024-09-29 ``Bugfix/42575 workaround pin azure kusto data (#42576)`` +`193defd289 `_ 2024-09-25 ``(bugfix): Paginated results in MSGraphAsyncOperator (#42414)`` +================================================================================================= =========== ================================================================================================== + 10.5.0 ...... 
-Latest change: 2024-09-05 +Latest change: 2024-09-21 ================================================================================================= =========== ======================================================================== Commit Committed Subject ================================================================================================= =========== ======================================================================== +`7628d47d04 `_ 2024-09-21 ``Prepare docs for Sep 1st wave of providers (#42387)`` `ede34ebee7 `_ 2024-09-05 ``Allow custom api versions in MSGraphAsyncOperator (#41331)`` `85ad5c374d `_ 2024-09-05 `` Add callback to process Azure Service Bus message contents (#41601)`` `1613e9ec1c `_ 2024-08-25 ``remove soft_fail (#41710)`` diff --git a/docs/apache-airflow-providers-microsoft-azure/index.rst b/docs/apache-airflow-providers-microsoft-azure/index.rst index 879e896cd2371..7a073a3b72341 100644 --- a/docs/apache-airflow-providers-microsoft-azure/index.rst +++ b/docs/apache-airflow-providers-microsoft-azure/index.rst @@ -82,7 +82,7 @@ apache-airflow-providers-microsoft-azure package `Microsoft Azure `__ -Release: 10.5.0 +Release: 10.5.1 Provider package ---------------- @@ -102,9 +102,9 @@ Requirements The minimum Apache Airflow version supported by this provider package is ``2.8.0``. -================================ ================== +================================ =================== PIP package Version required -================================ ================== +================================ =================== ``apache-airflow`` ``>=2.8.0`` ``adlfs`` ``>=2023.10.0`` ``azure-batch`` ``>=8.0.0`` @@ -123,12 +123,12 @@ PIP package Version required ``azure-synapse-artifacts`` ``>=0.17.0`` ``adal`` ``>=1.2.7`` ``azure-storage-file-datalake`` ``>=12.9.1`` -``azure-kusto-data`` ``>=4.1.0`` +``azure-kusto-data`` ``>=4.1.0,!=4.6.0`` ``azure-mgmt-datafactory`` ``>=2.0.0`` ``azure-mgmt-containerregistry`` ``>=8.0.0`` ``azure-mgmt-containerinstance`` ``>=10.1.0`` ``msgraph-core`` ``>=1.0.0`` -================================ ================== +================================ =================== Cross provider package dependencies ----------------------------------- @@ -158,5 +158,5 @@ Downloading official packages You can download officially released packages and verify their checksums and signatures from the `Official Apache Download site `_ -* `The apache-airflow-providers-microsoft-azure 10.5.0 sdist package `_ (`asc `__, `sha512 `__) -* `The apache-airflow-providers-microsoft-azure 10.5.0 wheel package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-microsoft-azure 10.5.1 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-microsoft-azure 10.5.1 wheel package `_ (`asc `__, `sha512 `__) diff --git a/docs/apache-airflow-providers-mysql/commits.rst b/docs/apache-airflow-providers-mysql/commits.rst index 783a88e8784dd..d01a1f5b98ad8 100644 --- a/docs/apache-airflow-providers-mysql/commits.rst +++ b/docs/apache-airflow-providers-mysql/commits.rst @@ -35,14 +35,26 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-09-30 ``Rename dataset related python variable names to asset (#41348)`` +================================================================================================= =========== ================================================================== + 5.7.1 ..... 
-Latest change: 2024-09-04 +Latest change: 2024-09-21 ================================================================================================= =========== ================================================================================= Commit Committed Subject ================================================================================================= =========== ================================================================================= +`7628d47d04 `_ 2024-09-21 ``Prepare docs for Sep 1st wave of providers (#42387)`` `2e813eb87d `_ 2024-09-04 ``Generalize caching of connection in DbApiHook to improve performance (#40751)`` ================================================================================================= =========== ================================================================================= diff --git a/docs/apache-airflow-providers-mysql/index.rst b/docs/apache-airflow-providers-mysql/index.rst index 63b68a2fcac2a..622084d88957c 100644 --- a/docs/apache-airflow-providers-mysql/index.rst +++ b/docs/apache-airflow-providers-mysql/index.rst @@ -77,7 +77,7 @@ apache-airflow-providers-mysql package `MySQL `__ -Release: 5.7.1 +Release: 5.7.2 Provider package ---------------- @@ -136,5 +136,5 @@ Downloading official packages You can download officially released packages and verify their checksums and signatures from the `Official Apache Download site `_ -* `The apache-airflow-providers-mysql 5.7.1 sdist package `_ (`asc `__, `sha512 `__) -* `The apache-airflow-providers-mysql 5.7.1 wheel package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-mysql 5.7.2 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-mysql 5.7.2 wheel package `_ (`asc `__, `sha512 `__) diff --git a/docs/apache-airflow-providers-openlineage/commits.rst b/docs/apache-airflow-providers-openlineage/commits.rst index d2e20868233c3..41025a8f7b445 100644 --- a/docs/apache-airflow-providers-openlineage/commits.rst +++ b/docs/apache-airflow-providers-openlineage/commits.rst @@ -35,14 +35,28 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-10-09 ``Standard provider bash operator (#42252)`` +`63ff22f403 `_ 2024-10-08 ``Drop python3.8 support core and providers (#42766)`` +`ede7cb27fd `_ 2024-09-30 ``Rename dataset related python variable names to asset (#41348)`` +================================================================================================= =========== ================================================================== + 1.12.1 ...... 
-Latest change: 2024-09-26 +Latest change: 2024-09-27 ================================================================================================= =========== ======================================================================== Commit Committed Subject ================================================================================================= =========== ======================================================================== +`bcee926d49 `_ 2024-09-27 ``Prepare docs for Sep 2nd adhoc wave of providers (#42519)`` `84e8cdf674 `_ 2024-09-26 ``fix: OL dag start event not being emitted (#42448)`` `ffff0e8b33 `_ 2024-09-23 ``Fix typo in error stack trace formatting for clearer output (#42017)`` ================================================================================================= =========== ======================================================================== diff --git a/docs/apache-airflow-providers-openlineage/index.rst b/docs/apache-airflow-providers-openlineage/index.rst index 817bddd6c0b74..f0173f4e2c6fe 100644 --- a/docs/apache-airflow-providers-openlineage/index.rst +++ b/docs/apache-airflow-providers-openlineage/index.rst @@ -73,7 +73,7 @@ apache-airflow-providers-openlineage package `OpenLineage `__ -Release: 1.12.1 +Release: 1.12.2 Provider package ---------------- @@ -114,14 +114,15 @@ You can install such cross-provider dependencies when installing from PyPI. For .. code-block:: bash - pip install apache-airflow-providers-openlineage[common.sql] + pip install apache-airflow-providers-openlineage[common.compat] -============================================================================================================ ============== -Dependent package Extra -============================================================================================================ ============== -`apache-airflow-providers-common-sql `_ ``common.sql`` -============================================================================================================ ============== +================================================================================================================== ================= +Dependent package Extra +================================================================================================================== ================= +`apache-airflow-providers-common-compat `_ ``common.compat`` +`apache-airflow-providers-common-sql `_ ``common.sql`` +================================================================================================================== ================= Downloading official packages ----------------------------- @@ -129,5 +130,5 @@ Downloading official packages You can download officially released packages and verify their checksums and signatures from the `Official Apache Download site `_ -* `The apache-airflow-providers-openlineage 1.12.1 sdist package `_ (`asc `__, `sha512 `__) -* `The apache-airflow-providers-openlineage 1.12.1 wheel package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-openlineage 1.12.2 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-openlineage 1.12.2 wheel package `_ (`asc `__, `sha512 `__) diff --git a/docs/apache-airflow-providers-opensearch/commits.rst b/docs/apache-airflow-providers-opensearch/commits.rst index 7d797af110858..852704be06bee 100644 --- a/docs/apache-airflow-providers-opensearch/commits.rst +++ b/docs/apache-airflow-providers-opensearch/commits.rst @@ -35,14 +35,28 @@ For high-level changelog, see :doc:`package information including 
changelog `_ 2024-10-06 ``Removed conditional check for task context logging in airflow version 2.8.0 and above (#42764)`` +`b0a18d9019 `_ 2024-10-04 ``Handle empty login and password with opensearch client (#39982)`` +`ba1c602533 `_ 2024-09-21 ``(feat): Add opensearch logging integration (#41799)`` +================================================================================================= =========== ================================================================================================== + 1.4.0 ..... -Latest change: 2024-08-12 +Latest change: 2024-08-19 ================================================================================================= =========== ======================================================================= Commit Committed Subject ================================================================================================= =========== ======================================================================= +`75fb7acbac `_ 2024-08-19 ``Prepare docs for Aug 2nd wave of providers (#41559)`` `fcbff15bda `_ 2024-08-12 ``Bump minimum Airflow version in providers to Airflow 2.8.0 (#41396)`` ================================================================================================= =========== ======================================================================= diff --git a/docs/apache-airflow-providers-opensearch/index.rst b/docs/apache-airflow-providers-opensearch/index.rst index d15064228892b..de79657141fad 100644 --- a/docs/apache-airflow-providers-opensearch/index.rst +++ b/docs/apache-airflow-providers-opensearch/index.rst @@ -78,7 +78,7 @@ apache-airflow-providers-opensearch package `OpenSearch `__ -Release: 1.4.0 +Release: 1.5.0 Provider package ---------------- diff --git a/docs/apache-airflow-providers-postgres/commits.rst b/docs/apache-airflow-providers-postgres/commits.rst index 538927251f5f3..a1ec69eff688c 100644 --- a/docs/apache-airflow-providers-postgres/commits.rst +++ b/docs/apache-airflow-providers-postgres/commits.rst @@ -35,14 +35,26 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-09-30 ``Rename dataset related python variable names to asset (#41348)`` +================================================================================================= =========== ================================================================== + 5.13.0 ...... 
-Latest change: 2024-09-05 +Latest change: 2024-09-21 ================================================================================================= =========== ================================================================================= Commit Committed Subject ================================================================================================= =========== ================================================================================= +`7628d47d04 `_ 2024-09-21 ``Prepare docs for Sep 1st wave of providers (#42387)`` `17c30b4f21 `_ 2024-09-05 ``feat: log client db messages for provider postgres (#40171)`` `2e813eb87d `_ 2024-09-04 ``Generalize caching of connection in DbApiHook to improve performance (#40751)`` ================================================================================================= =========== ================================================================================= diff --git a/docs/apache-airflow-providers-postgres/index.rst b/docs/apache-airflow-providers-postgres/index.rst index 9b6124999a972..6fbd69ba1c5aa 100644 --- a/docs/apache-airflow-providers-postgres/index.rst +++ b/docs/apache-airflow-providers-postgres/index.rst @@ -77,7 +77,7 @@ apache-airflow-providers-postgres package `PostgreSQL `__ -Release: 5.13.0 +Release: 5.13.1 Provider package ---------------- @@ -132,5 +132,5 @@ Downloading official packages You can download officially released packages and verify their checksums and signatures from the `Official Apache Download site `_ -* `The apache-airflow-providers-postgres 5.13.0 sdist package `_ (`asc `__, `sha512 `__) -* `The apache-airflow-providers-postgres 5.13.0 wheel package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-postgres 5.13.1 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-postgres 5.13.1 wheel package `_ (`asc `__, `sha512 `__) diff --git a/docs/apache-airflow-providers-snowflake/commits.rst b/docs/apache-airflow-providers-snowflake/commits.rst index 2b7c230e5e726..9d6c44634188b 100644 --- a/docs/apache-airflow-providers-snowflake/commits.rst +++ b/docs/apache-airflow-providers-snowflake/commits.rst @@ -35,14 +35,28 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-10-03 ``- Snowflake does not currently support variable binding in multi-statement SQL requests. (#42719)`` +`976064dc6c `_ 2024-10-02 ``Add Snowpark operator and decorator (#42457)`` +`8a5eb22df1 `_ 2024-10-02 ``Make 'private_key_content' a sensitive field in Snowflake connection (#42649)`` +================================================================================================= =========== ===================================================================================================== + 5.7.1 ..... -Latest change: 2024-08-27 +Latest change: 2024-09-21 ================================================================================================= =========== ==================================================================== Commit Committed Subject ================================================================================================= =========== ==================================================================== +`7628d47d04 `_ 2024-09-21 ``Prepare docs for Sep 1st wave of providers (#42387)`` `33f81bfb93 `_ 2024-08-27 ``Update snowflake naming for account names and locators. 
(#41775)`` ================================================================================================= =========== ==================================================================== diff --git a/docs/apache-airflow-providers-snowflake/index.rst b/docs/apache-airflow-providers-snowflake/index.rst index b00ea39c5261e..c9746f7806943 100644 --- a/docs/apache-airflow-providers-snowflake/index.rst +++ b/docs/apache-airflow-providers-snowflake/index.rst @@ -78,7 +78,7 @@ apache-airflow-providers-snowflake package `Snowflake `__ -Release: 5.7.1 +Release: 5.8.0 Provider package ---------------- @@ -109,6 +109,7 @@ PIP package Version required ``pyarrow`` ``>=14.0.1`` ``snowflake-connector-python`` ``>=3.7.1`` ``snowflake-sqlalchemy`` ``>=1.4.0`` +``snowflake-snowpark-python`` ``>=1.17.0; python_version < "3.12"`` ========================================== ========================================= Cross provider package dependencies @@ -138,5 +139,5 @@ Downloading official packages You can download officially released packages and verify their checksums and signatures from the `Official Apache Download site `_ -* `The apache-airflow-providers-snowflake 5.7.1 sdist package `_ (`asc `__, `sha512 `__) -* `The apache-airflow-providers-snowflake 5.7.1 wheel package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-snowflake 5.8.0 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-snowflake 5.8.0 wheel package `_ (`asc `__, `sha512 `__) diff --git a/docs/apache-airflow-providers-trino/commits.rst b/docs/apache-airflow-providers-trino/commits.rst index f001e5d9dea15..46482cdfd610e 100644 --- a/docs/apache-airflow-providers-trino/commits.rst +++ b/docs/apache-airflow-providers-trino/commits.rst @@ -35,14 +35,26 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-09-30 ``Rename dataset related python variable names to asset (#41348)`` +================================================================================================= =========== ================================================================== + 5.8.0 ..... 
-Latest change: 2024-08-12 +Latest change: 2024-08-19 ================================================================================================= =========== ======================================================================= Commit Committed Subject ================================================================================================= =========== ======================================================================= +`75fb7acbac `_ 2024-08-19 ``Prepare docs for Aug 2nd wave of providers (#41559)`` `fcbff15bda `_ 2024-08-12 ``Bump minimum Airflow version in providers to Airflow 2.8.0 (#41396)`` ================================================================================================= =========== ======================================================================= diff --git a/docs/apache-airflow-providers-trino/index.rst b/docs/apache-airflow-providers-trino/index.rst index 07991e5da90b5..9a17b5907e5db 100644 --- a/docs/apache-airflow-providers-trino/index.rst +++ b/docs/apache-airflow-providers-trino/index.rst @@ -78,7 +78,7 @@ apache-airflow-providers-trino package `Trino `__ -Release: 5.8.0 +Release: 5.8.1 Provider package ---------------- @@ -135,5 +135,5 @@ Downloading official packages You can download officially released packages and verify their checksums and signatures from the `Official Apache Download site `_ -* `The apache-airflow-providers-trino 5.8.0 sdist package `_ (`asc `__, `sha512 `__) -* `The apache-airflow-providers-trino 5.8.0 wheel package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-trino 5.8.1 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-trino 5.8.1 wheel package `_ (`asc `__, `sha512 `__) diff --git a/docs/apache-airflow-providers-ydb/commits.rst b/docs/apache-airflow-providers-ydb/commits.rst index 01b90e938deb1..796104f3acde8 100644 --- a/docs/apache-airflow-providers-ydb/commits.rst +++ b/docs/apache-airflow-providers-ydb/commits.rst @@ -35,14 +35,26 @@ For high-level changelog, see :doc:`package information including changelog `_ 2024-10-03 ``Add an ability to use scan queries via new YDB operator (#42311)`` +================================================================================================= =========== ==================================================================== + 1.3.0 ..... 
-Latest change: 2024-08-14 +Latest change: 2024-08-19 ================================================================================================= =========== ==================================================================================================== Commit Committed Subject ================================================================================================= =========== ==================================================================================================== +`75fb7acbac `_ 2024-08-19 ``Prepare docs for Aug 2nd wave of providers (#41559)`` `6e22364278 `_ 2024-08-14 ``ydb provider: add database to table name in bulk upsert, use bulk upsert in system test (#41303)`` `fcbff15bda `_ 2024-08-12 ``Bump minimum Airflow version in providers to Airflow 2.8.0 (#41396)`` ================================================================================================= =========== ==================================================================================================== diff --git a/docs/apache-airflow-providers-ydb/index.rst b/docs/apache-airflow-providers-ydb/index.rst index 45cbd2b00b7a6..4dff0e421f9bd 100644 --- a/docs/apache-airflow-providers-ydb/index.rst +++ b/docs/apache-airflow-providers-ydb/index.rst @@ -77,7 +77,7 @@ apache-airflow-providers-ydb package `YDB `__ -Release: 1.3.0 +Release: 1.4.0 Provider package ---------------- @@ -130,5 +130,5 @@ Downloading official packages You can download officially released packages and verify their checksums and signatures from the `Official Apache Download site `_ -* `The apache-airflow-providers-ydb 1.3.0 sdist package `_ (`asc `__, `sha512 `__) -* `The apache-airflow-providers-ydb 1.3.0 wheel package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-ydb 1.4.0 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-ydb 1.4.0 wheel package `_ (`asc `__, `sha512 `__) From 51f6e2ffae55138a1ca2169b91c430cc650d9b8a Mon Sep 17 00:00:00 2001 From: Danny Liu Date: Wed, 9 Oct 2024 11:41:53 -0700 Subject: [PATCH 044/125] fix more PT004 PyDocStyle checks (#42841) --- tests/www/views/test_views_acl.py | 8 ++-- .../www/views/test_views_cluster_activity.py | 12 +++--- tests/www/views/test_views_connection.py | 2 +- tests/www/views/test_views_dagrun.py | 4 +- tests/www/views/test_views_dataset.py | 2 +- tests/www/views/test_views_decorators.py | 2 +- tests/www/views/test_views_extra_links.py | 6 +-- tests/www/views/test_views_grid.py | 6 +-- tests/www/views/test_views_home.py | 38 +++++++++---------- tests/www/views/test_views_log.py | 2 +- tests/www/views/test_views_pool.py | 2 +- tests/www/views/test_views_rendered.py | 4 +- 12 files changed, 44 insertions(+), 44 deletions(-) diff --git a/tests/www/views/test_views_acl.py b/tests/www/views/test_views_acl.py index 139644f67a6da..7b60a2c11ecad 100644 --- a/tests/www/views/test_views_acl.py +++ b/tests/www/views/test_views_acl.py @@ -136,13 +136,13 @@ def acl_app(app): @pytest.fixture(scope="module") -def reset_dagruns(): +def _reset_dagruns(): """Clean up stray garbage from other tests.""" clear_db_runs() @pytest.fixture(autouse=True) -def init_dagruns(acl_app, reset_dagruns): +def _init_dagruns(acl_app, _reset_dagruns): triggered_by_kwargs = {"triggered_by": DagRunTriggeredByType.TEST} if AIRFLOW_V_3_0_PLUS else {} acl_app.dag_bag.get_dag("example_bash_operator").create_dagrun( run_id=DEFAULT_RUN_ID, @@ -322,7 +322,7 @@ def test_dag_autocomplete_dag_display_name(client_all_dags): @pytest.fixture -def setup_paused_dag(): +def 
_setup_paused_dag(): """Pause a DAG so we can test filtering.""" dag_to_pause = "example_branch_operator" with create_session() as session: @@ -339,7 +339,7 @@ def setup_paused_dag(): ("paused", "example_branch_operator", "example_branch_labels"), ], ) -@pytest.mark.usefixtures("setup_paused_dag") +@pytest.mark.usefixtures("_setup_paused_dag") def test_dag_autocomplete_status(client_all_dags, status, expected, unexpected): with client_all_dags.session_transaction() as flask_session: flask_session[FILTER_STATUS_COOKIE] = status diff --git a/tests/www/views/test_views_cluster_activity.py b/tests/www/views/test_views_cluster_activity.py index cdcebc1c8cf34..543d563e8da03 100644 --- a/tests/www/views/test_views_cluster_activity.py +++ b/tests/www/views/test_views_cluster_activity.py @@ -39,20 +39,20 @@ def examples_dag_bag(): @pytest.fixture(autouse=True) -def clean(): +def _clean(): clear_db_runs() yield clear_db_runs() -# freeze time fixture so that it is applied before `make_dag_runs` is! +# freeze time fixture so that it is applied before `_make_dag_runs` is! @pytest.fixture -def freeze_time_for_dagruns(time_machine): +def _freeze_time_for_dagruns(time_machine): time_machine.move_to("2023-05-02T00:00:00+00:00", tick=False) @pytest.fixture -def make_dag_runs(dag_maker, session, time_machine): +def _make_dag_runs(dag_maker, session, time_machine): with dag_maker( dag_id="test_dag_id", serialized=True, @@ -99,7 +99,7 @@ def make_dag_runs(dag_maker, session, time_machine): session.flush() -@pytest.mark.usefixtures("freeze_time_for_dagruns", "make_dag_runs") +@pytest.mark.usefixtures("_freeze_time_for_dagruns", "_make_dag_runs") def test_historical_metrics_data(admin_client, session, time_machine): resp = admin_client.get( "/object/historical_metrics_data?start_date=2023-01-01T00:00&end_date=2023-08-02T00:00", @@ -127,7 +127,7 @@ def test_historical_metrics_data(admin_client, session, time_machine): } -@pytest.mark.usefixtures("freeze_time_for_dagruns", "make_dag_runs") +@pytest.mark.usefixtures("_freeze_time_for_dagruns", "_make_dag_runs") def test_historical_metrics_data_date_filters(admin_client, session): resp = admin_client.get( "/object/historical_metrics_data?start_date=2023-02-02T00:00&end_date=2023-06-02T00:00", diff --git a/tests/www/views/test_views_connection.py b/tests/www/views/test_views_connection.py index a209cdfc2be8a..7530a1f677f9b 100644 --- a/tests/www/views/test_views_connection.py +++ b/tests/www/views/test_views_connection.py @@ -47,7 +47,7 @@ def conn_with_extra() -> dict[str, Any]: @pytest.fixture(autouse=True) -def clear_connections(): +def _clear_connections(): with create_session() as session: session.query(Connection).delete() diff --git a/tests/www/views/test_views_dagrun.py b/tests/www/views/test_views_dagrun.py index d95955246ac78..9e45464c28aa1 100644 --- a/tests/www/views/test_views_dagrun.py +++ b/tests/www/views/test_views_dagrun.py @@ -93,7 +93,7 @@ def client_dr_without_dag_run_create(app): @pytest.fixture(scope="module", autouse=True) -def init_blank_dagrun(): +def _init_blank_dagrun(): """Make sure there are no runs before we test anything. This really shouldn't be needed, but tests elsewhere leave the db dirty. 
@@ -104,7 +104,7 @@ def init_blank_dagrun(): @pytest.fixture(autouse=True) -def reset_dagrun(): +def _reset_dagrun(): yield with create_session() as session: session.query(DagRun).delete() diff --git a/tests/www/views/test_views_dataset.py b/tests/www/views/test_views_dataset.py index 3d3351bb6493a..3db16a996862e 100644 --- a/tests/www/views/test_views_dataset.py +++ b/tests/www/views/test_views_dataset.py @@ -32,7 +32,7 @@ class TestDatasetEndpoint: @pytest.fixture(autouse=True) - def cleanup(self): + def _cleanup(self): clear_db_assets() yield clear_db_assets() diff --git a/tests/www/views/test_views_decorators.py b/tests/www/views/test_views_decorators.py index e1bc1db0efc48..86bce2f3bbbc2 100644 --- a/tests/www/views/test_views_decorators.py +++ b/tests/www/views/test_views_decorators.py @@ -78,7 +78,7 @@ def dagruns(bash_dag, xcom_dag): @pytest.fixture(autouse=True) -def clean_db(): +def _clean_db(): clear_db_variables() yield clear_db_variables() diff --git a/tests/www/views/test_views_extra_links.py b/tests/www/views/test_views_extra_links.py index 55b2424f3a9f0..4fb0d2d40b017 100644 --- a/tests/www/views/test_views_extra_links.py +++ b/tests/www/views/test_views_extra_links.py @@ -101,7 +101,7 @@ def dag_run(create_dag_run, session): @pytest.fixture(scope="module", autouse=True) -def patched_app(app, dag): +def _patched_app(app, dag): with mock.patch.object(app, "dag_bag") as mock_dag_bag: mock_dag_bag.get_dag.return_value = dag yield @@ -123,7 +123,7 @@ def task_3(dag): @pytest.fixture(scope="module", autouse=True) -def init_blank_task_instances(): +def _init_blank_task_instances(): """Make sure there are no runs before we test anything. This really shouldn't be needed, but tests elsewhere leave the db dirty. @@ -132,7 +132,7 @@ def init_blank_task_instances(): @pytest.fixture(autouse=True) -def reset_task_instances(): +def _reset_task_instances(): yield clear_db_runs() diff --git a/tests/www/views/test_views_grid.py b/tests/www/views/test_views_grid.py index b4dd6f6082e57..8d5e4e22e04b8 100644 --- a/tests/www/views/test_views_grid.py +++ b/tests/www/views/test_views_grid.py @@ -54,7 +54,7 @@ def examples_dag_bag(): @pytest.fixture(autouse=True) -def clean(): +def _clean(): clear_db_runs() clear_db_assets() yield @@ -187,11 +187,11 @@ def test_grid_data_filtered_on_run_type_and_run_state(admin_client, dag_with_run # Create this as a fixture so that it is applied before the `dag_with_runs` fixture is! 
@pytest.fixture -def freeze_time_for_dagruns(time_machine): +def _freeze_time_for_dagruns(time_machine): time_machine.move_to("2022-01-02T00:00:00+00:00", tick=False) -@pytest.mark.usefixtures("freeze_time_for_dagruns") +@pytest.mark.usefixtures("_freeze_time_for_dagruns") def test_one_run(admin_client, dag_with_runs: list[DagRun], session): """ Test a DAG with complex interaction of states: diff --git a/tests/www/views/test_views_home.py b/tests/www/views/test_views_home.py index 44dda24feecbc..6a5bf3c4ad531 100644 --- a/tests/www/views/test_views_home.py +++ b/tests/www/views/test_views_home.py @@ -42,7 +42,7 @@ def clean_db(): @pytest.fixture(autouse=True) -def setup(): +def _setup(): clean_db() yield clean_db() @@ -72,7 +72,7 @@ def test_home(capture_templates, admin_client): @mock.patch("airflow.www.views.AirflowBaseView.render_template") -def test_home_dags_count(render_template_mock, admin_client, working_dags, session): +def test_home_dags_count(render_template_mock, admin_client, _working_dags, session): from sqlalchemy import update from airflow.models.dag import DagModel @@ -204,7 +204,7 @@ def _process_file(file_path): @pytest.fixture -def working_dags(tmp_path): +def _working_dags(tmp_path): dag_contents_template = "from airflow import DAG\ndag = DAG('{}', schedule=None, tags=['{}'])" for dag_id, tag in zip(TEST_FILTER_DAG_IDS, TEST_TAGS): path = tmp_path / f"{dag_id}.py" @@ -213,7 +213,7 @@ def working_dags(tmp_path): @pytest.fixture -def working_dags_with_read_perm(tmp_path): +def _working_dags_with_read_perm(tmp_path): dag_contents_template = "from airflow import DAG\ndag = DAG('{}', schedule=None, tags=['{}'])" dag_contents_template_with_read_perm = ( "from airflow import DAG\ndag = DAG('{}', schedule=None, tags=['{}'], " @@ -229,7 +229,7 @@ def working_dags_with_read_perm(tmp_path): @pytest.fixture -def working_dags_with_edit_perm(tmp_path): +def _working_dags_with_edit_perm(tmp_path): dag_contents_template = "from airflow import DAG\ndag = DAG('{}', schedule=None, tags=['{}'])" dag_contents_template_with_read_perm = ( "from airflow import DAG\ndag = DAG('{}', schedule=None, tags=['{}'], " @@ -245,7 +245,7 @@ def working_dags_with_edit_perm(tmp_path): @pytest.fixture -def broken_dags(tmp_path, working_dags): +def _broken_dags(tmp_path, _working_dags): for dag_id in TEST_FILTER_DAG_IDS: path = tmp_path / f"{dag_id}.py" path.write_text("airflow DAG") @@ -253,7 +253,7 @@ def broken_dags(tmp_path, working_dags): @pytest.fixture -def broken_dags_with_read_perm(tmp_path, working_dags_with_read_perm): +def _broken_dags_with_read_perm(tmp_path, _working_dags_with_read_perm): for dag_id in TEST_FILTER_DAG_IDS: path = tmp_path / f"{dag_id}.py" path.write_text("airflow DAG") @@ -261,7 +261,7 @@ def broken_dags_with_read_perm(tmp_path, working_dags_with_read_perm): @pytest.fixture -def broken_dags_after_working(tmp_path): +def _broken_dags_after_working(tmp_path): # First create and process a DAG file that works path = tmp_path / "all_in_one.py" contents = "from airflow import DAG\n" @@ -275,7 +275,7 @@ def broken_dags_after_working(tmp_path): _process_file(path) -def test_home_filter_tags(working_dags, admin_client): +def test_home_filter_tags(_working_dags, admin_client): with admin_client: admin_client.get("home?tags=example&tags=data", follow_redirects=True) assert "example,data" == flask.session[FILTER_TAGS_COOKIE] @@ -284,7 +284,7 @@ def test_home_filter_tags(working_dags, admin_client): assert flask.session[FILTER_TAGS_COOKIE] is None -def test_home_importerrors(broken_dags, 
user_client): +def test_home_importerrors(_broken_dags, user_client): # Users with "can read on DAGs" gets all DAG import errors resp = user_client.get("home", follow_redirects=True) check_content_in_response("Import Errors", resp) @@ -292,7 +292,7 @@ def test_home_importerrors(broken_dags, user_client): check_content_in_response(f"/{dag_id}.py", resp) -def test_home_no_importerrors_perm(broken_dags, client_no_importerror): +def test_home_no_importerrors_perm(_broken_dags, client_no_importerror): # Users without "can read on import errors" don't see any import errors resp = client_no_importerror.get("home", follow_redirects=True) check_content_not_in_response("Import Errors", resp) @@ -310,7 +310,7 @@ def test_home_no_importerrors_perm(broken_dags, client_no_importerror): "home?lastrun=all_states", ], ) -def test_home_importerrors_filtered_singledag_user(broken_dags_with_read_perm, client_single_dag, page): +def test_home_importerrors_filtered_singledag_user(_broken_dags_with_read_perm, client_single_dag, page): # Users that can only see certain DAGs get a filtered list of import errors resp = client_single_dag.get(page, follow_redirects=True) check_content_in_response("Import Errors", resp) @@ -322,7 +322,7 @@ def test_home_importerrors_filtered_singledag_user(broken_dags_with_read_perm, c check_content_not_in_response(f"/{dag_id}.py", resp) -def test_home_importerrors_missing_read_on_all_dags_in_file(broken_dags_after_working, client_single_dag): +def test_home_importerrors_missing_read_on_all_dags_in_file(_broken_dags_after_working, client_single_dag): # If a user doesn't have READ on all DAGs in a file, that files traceback is redacted resp = client_single_dag.get("home", follow_redirects=True) check_content_in_response("Import Errors", resp) @@ -333,14 +333,14 @@ def test_home_importerrors_missing_read_on_all_dags_in_file(broken_dags_after_wo check_content_in_response("REDACTED", resp) -def test_home_dag_list(working_dags, user_client): +def test_home_dag_list(_working_dags, user_client): # Users with "can read on DAGs" gets all DAGs resp = user_client.get("home", follow_redirects=True) for dag_id in TEST_FILTER_DAG_IDS: check_content_in_response(f"dag_id={dag_id}", resp) -def test_home_dag_list_filtered_singledag_user(working_dags_with_read_perm, client_single_dag): +def test_home_dag_list_filtered_singledag_user(_working_dags_with_read_perm, client_single_dag): # Users that can only see certain DAGs get a filtered list resp = client_single_dag.get("home", follow_redirects=True) # They can see the first DAG @@ -350,7 +350,7 @@ def test_home_dag_list_filtered_singledag_user(working_dags_with_read_perm, clie check_content_not_in_response(f"dag_id={dag_id}", resp) -def test_home_dag_list_search(working_dags, user_client): +def test_home_dag_list_search(_working_dags, user_client): resp = user_client.get("home?search=filter_test", follow_redirects=True) check_content_in_response("dag_id=filter_test_1", resp) check_content_in_response("dag_id=filter_test_2", resp) @@ -358,7 +358,7 @@ def test_home_dag_list_search(working_dags, user_client): check_content_not_in_response("dag_id=a_first_dag_id_asc", resp) -def test_home_dag_edit_permissions(capture_templates, working_dags_with_edit_perm, client_single_dag_edit): +def test_home_dag_edit_permissions(capture_templates, _working_dags_with_edit_perm, client_single_dag_edit): with capture_templates() as templates: client_single_dag_edit.get("home", follow_redirects=True) @@ -446,7 +446,7 @@ def test_dashboard_flash_messages_type(user_client): ], 
ids=["no_order_provided", "ascending_order_on_dag_id", "descending_order_on_dag_id"], ) -def test_sorting_home_view(url, lower_key, greater_key, user_client, working_dags): +def test_sorting_home_view(url, lower_key, greater_key, user_client, _working_dags): resp = user_client.get(url, follow_redirects=True) resp_html = resp.data.decode("utf-8") lower_index = resp_html.find(lower_key) @@ -489,7 +489,7 @@ def test_analytics_pixel(user_client, is_enabled, should_have_pixel): ], ) def test_filter_cookie_eval( - working_dags, + _working_dags, admin_client, url, filter_tags_cookie_val, diff --git a/tests/www/views/test_views_log.py b/tests/www/views/test_views_log.py index f59fe7418f395..72e282c7b3f8c 100644 --- a/tests/www/views/test_views_log.py +++ b/tests/www/views/test_views_log.py @@ -117,7 +117,7 @@ def factory(): @pytest.fixture(autouse=True) -def reset_modules_after_every_test(backup_modules): +def _reset_modules_after_every_test(backup_modules): yield # Remove any new modules imported during the test run. # This lets us import the same source files for more than one test. diff --git a/tests/www/views/test_views_pool.py b/tests/www/views/test_views_pool.py index 3fcacbbbf8bed..020c5353a5f34 100644 --- a/tests/www/views/test_views_pool.py +++ b/tests/www/views/test_views_pool.py @@ -36,7 +36,7 @@ @pytest.fixture(autouse=True) -def clear_pools(): +def _clear_pools(): with create_session() as session: session.query(Pool).delete() diff --git a/tests/www/views/test_views_rendered.py b/tests/www/views/test_views_rendered.py index 2d1754af29f65..87e693d5400ab 100644 --- a/tests/www/views/test_views_rendered.py +++ b/tests/www/views/test_views_rendered.py @@ -119,7 +119,7 @@ def task_secret(dag): @pytest.fixture(scope="module", autouse=True) -def init_blank_db(): +def _init_blank_db(): """Make sure there are no runs before we test anything. This really shouldn't be needed, but tests elsewhere leave the db dirty. @@ -130,7 +130,7 @@ def init_blank_db(): @pytest.fixture(autouse=True) -def reset_db(dag, task1, task2, task3, task4, task_secret): +def _reset_db(dag, task1, task2, task3, task4, task_secret): yield clear_db_dags() clear_db_runs() From bf002356ba0a38d317adeb651f066588908a79a3 Mon Sep 17 00:00:00 2001 From: Kaxil Naik Date: Wed, 9 Oct 2024 19:45:50 +0100 Subject: [PATCH 045/125] Revert Asset to Dataset for Core Extension Doc (#42867) This PR reverts Core Extension doc change (https://airflow.apache.org/docs/apache-airflow-providers/core-extensions/index.html) from https://github.com/apache/airflow/pull/41348 Since those docs only work on Airflow stable, we can only change this after 3.0 --- .../core-extensions/asset-schemes.rst | 11 +++++++---- .../howto/create-custom-providers.rst | 7 +++++-- docs/exts/operators_and_hooks_ref.py | 2 +- 3 files changed, 13 insertions(+), 7 deletions(-) diff --git a/docs/apache-airflow-providers/core-extensions/asset-schemes.rst b/docs/apache-airflow-providers/core-extensions/asset-schemes.rst index 0a629b9f8a4fd..87bfd8ae67652 100644 --- a/docs/apache-airflow-providers/core-extensions/asset-schemes.rst +++ b/docs/apache-airflow-providers/core-extensions/asset-schemes.rst @@ -15,14 +15,17 @@ specific language governing permissions and limitations under the License. 
-asset URI Schemes
+Dataset URI Schemes
-------------------
-This is a summary of all Apache Airflow Community provided implementations of asset URI schemes
+This is a summary of all Apache Airflow Community provided implementations of dataset URI schemes
 exposed via Airflow core and community-managed providers.
-See :doc:`documentation on assets <apache-airflow:authoring-and-scheduling/assets>` for details on how asset URIs work.
+.. TODO: Change this from Dataset to Asset in Airflow 3.0
-.. airflow-asset-schemes::
+See :doc:`documentation on Datasets <apache-airflow:authoring-and-scheduling/datasets>` for details on how dataset URIs work.
+
+.. airflow-dataset-schemes::
    :tags: None
    :header-separator: "

diff --git a/docs/apache-airflow-providers/howto/create-custom-providers.rst b/docs/apache-airflow-providers/howto/create-custom-providers.rst
index d95719e38bc6c..ba6d8068c4281 100644
--- a/docs/apache-airflow-providers/howto/create-custom-providers.rst
+++ b/docs/apache-airflow-providers/howto/create-custom-providers.rst
@@ -96,9 +96,12 @@ Exposing customized functionality to the Airflow's core:
 * ``filesystems`` - this field should contain the list of all the filesystem module names.
   See :doc:`apache-airflow:core-concepts/objectstorage` for description of the filesystems.
-* ``asset-uris`` - this field should contain the list of the URI schemes together with
+..
+    TODO: Change this from Dataset to Asset in Airflow 3.0
+
+* ``dataset-uris`` - this field should contain the list of the URI schemes together with
   class names implementing normalization functions.
-  See :doc:`apache-airflow:authoring-and-scheduling/assets` for description of the asset URIs.
+  See :doc:`apache-airflow:authoring-and-scheduling/datasets` for description of the dataset URIs.

 .. note:: Deprecated values

diff --git a/docs/exts/operators_and_hooks_ref.py b/docs/exts/operators_and_hooks_ref.py
index fe6cd5d3300d2..25b9d1d779426 100644
--- a/docs/exts/operators_and_hooks_ref.py
+++ b/docs/exts/operators_and_hooks_ref.py
@@ -538,7 +538,7 @@ def setup(app):
     app.add_directive("airflow-executors", ExecutorsDirective)
     app.add_directive("airflow-deferrable-operators", DeferrableOperatorDirective)
     app.add_directive("airflow-deprecations", DeprecationsDirective)
-    app.add_directive("airflow-asset-schemes", DatasetSchemeDirective)
+    app.add_directive("airflow-dataset-schemes", DatasetSchemeDirective)

     return {"parallel_read_safe": True, "parallel_write_safe": True}

From 857ca4c06c9008593674cabdd28d3c30e3e7f97b Mon Sep 17 00:00:00 2001
From: Ash Berlin-Taylor
Date: Wed, 9 Oct 2024 20:24:53 +0100
Subject: [PATCH 046/125] Split providers out of the main "airflow/" tree into
 a UV workspace project (#42505)

This is only a partial split so far. It moves all the code and tests, but
leaves the creation of `core/` to a separate PR as this is already large
enough.

In addition to the straight file rename, the other changes I had to make
here are:

- Some mypy/typing fixes.

  Mypy can be fragile about what it picks up when, so maybe some of those
  changes were caused by that. But the typing changes aren't large.

- Improve typing in the common.sql type stub.

  Again, likely a mypy file oddity, but the types should be safe.

- Removed the `check-providers-init-file-missing` check.

  This isn't needed now that airflow/providers shouldn't exist at all in
  the main tree.

- Create a "dev.tests_common" package that contains helper files and common
  pytest fixtures.

  Since the provider tests are no longer under tests/ they don't
  automatically share the fixtures from the parent `tests/conftest.py`, so
  they needed to be extracted.
Ditto for `tests.test_utils` -- they can't be easily imported in provider
tests anymore, so they are moved to a more explicit shared location.

In future we should switch how the CI image is built to make better use of
UV caching than our own approach, as that would remove a lot of custom code.
---
 .dockerignore | 1 +
 .github/boring-cyborg.yml | 500 +++---
 .github/workflows/basic-tests.yml | 5 +-
 .pre-commit-config.yaml | 142 +-
 Dockerfile | 7 +-
 Dockerfile.ci | 8 +-
 airflow/models/dag.py | 4 +-
 airflow/providers/.gitignore | 1 -
 airflow/providers_manager.py | 6 +-
 airflow/settings.py | 3 +-
 contributing-docs/08_static_code_checks.rst | 2 -
 contributing-docs/testing/system_tests.rst | 2 +-
 contributing-docs/testing/unit_tests.rst | 13 +-
 dev/breeze/doc/images/output_build-docs.svg | 28 +-
 .../doc/images/output_compile-ui-assets.svg | 10 +-
 ...release-management_add-back-references.svg | 12 +-
 ...ement_generate-issue-content-providers.svg | 10 +-
 ...agement_prepare-provider-documentation.svg | 24 +-
 ...e-management_prepare-provider-packages.svg | 26 +-
 ...output_release-management_publish-docs.svg | 28 +-
 ...t_sbom_generate-providers-requirements.svg | 26 +-
 ...utput_setup_check-all-params-in-groups.svg | 8 +-
 ...output_setup_regenerate-command-images.svg | 14 +-
 .../doc/images/output_static-checks.svg | 6 +-
 .../doc/images/output_static-checks.txt | 2 +-
 .../commands/kubernetes_commands.py | 2 +-
 .../commands/testing_commands.py | 2 +-
 .../src/airflow_breeze/pre_commit_ids.py | 1 -
 .../prepare_providers/provider_packages.py | 4 +-
 .../utils/docker_command_utils.py | 3 +
 .../src/airflow_breeze/utils/packages.py | 4 +-
 .../src/airflow_breeze/utils/path_utils.py | 8 +-
 .../utils/provider_dependencies.py | 4 +-
 .../utils/publish_docs_helpers.py | 24 +-
 .../src/airflow_breeze/utils/run_tests.py | 35 +-
 .../airflow_breeze/utils/selective_checks.py | 36 +-
 dev/breeze/tests/test_packages.py | 17 +-
 .../tests/test_provider_documentation.py | 2 +-
 .../tests/test_pytest_args_for_test_types.py | 28 +-
 dev/breeze/tests/test_run_test_args.py | 18 +-
 dev/breeze/tests/test_selective_checks.py | 86 +-
 dev/example_dags/update_example_dags_paths.py | 8 +-
 dev/perf/scheduler_dag_execution_timing.py | 2 +-
 .../tests_common/__init__.py | 19 +-
 .../tests_common/_internals}/__init__.py | 0
 .../_internals/capture_warnings.py | 0
 .../_internals/forbidden_warnings.py | 21 +-
 dev/tests_common/pyproject.toml | 38 +
 dev/tests_common/pytest_plugin.py | 1436 +++++++++++++++++
 .../tests_common}/test_utils/README.md | 10 +-
 .../tests_common/test_utils}/__init__.py | 5 +
 .../test_utils/api_connexion_utils.py | 9 +-
 .../tests_common}/test_utils/asserts.py | 0
 .../test_utils/azure_system_helpers.py | 5 +-
 .../tests_common}/test_utils/compat.py | 0
 .../tests_common}/test_utils/config.py | 0
 {tests => dev/tests_common}/test_utils/db.py | 31 +-
 .../tests_common}/test_utils/decorators.py | 0
 .../tests_common}/test_utils/fake_datetime.py | 0
 .../test_utils/gcp_system_helpers.py | 32 +-
 .../tests_common}/test_utils/get_all_tests.py | 0
 .../tests_common}/test_utils/hdfs_utils.py | 0
 .../test_utils/logging_command_executor.py | 0
 .../tests_common}/test_utils/mapping.py | 0
 .../tests_common}/test_utils/mock_executor.py | 0
 .../test_utils/mock_operators.py | 3 +-
 .../tests_common}/test_utils/mock_plugins.py | 0
 .../test_utils/mock_security_manager.py | 2 +-
 .../test_utils/operators}/__init__.py | 0
 .../operators/postgres_local_executor.cfg | 0
 .../tests_common/test_utils/perf}/__init__.py | 0
 .../test_utils/perf/perf_kit/__init__.py | 0
.../test_utils/perf/perf_kit/memory.py | 0 .../test_utils/perf/perf_kit/python.py | 0 .../perf/perf_kit/repeat_and_time.py | 0 .../test_utils/perf/perf_kit/sqlalchemy.py | 0 .../tests_common}/test_utils/permissions.py | 0 .../tests_common}/test_utils/providers.py | 0 .../remote_user_api_auth_backend.py | 0 .../test_utils/reset_warning_registry.py | 0 .../test_utils/salesforce_system_helpers.py | 0 .../test_utils/sftp_system_helpers.py | 0 .../tests_common/test_utils/system_tests.py | 0 .../test_utils/system_tests_class.py | 5 +- .../tests_common}/test_utils/terraform.py | 2 +- .../tests_common}/test_utils/timetables.py | 0 .../tests_common/test_utils}/watcher.py | 0 {tests => dev/tests_common}/test_utils/www.py | 0 .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators/airbyte.rst | 4 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators/analyticdb_spark.rst | 2 +- .../operators/oss.rst | 2 +- .../changelog.rst | 2 +- .../example-dags.rst | 2 +- .../apache-airflow-providers-amazon/index.rst | 2 +- .../operators/appflow.rst | 12 +- .../operators/athena/athena_boto.rst | 6 +- .../operators/athena/athena_sql.rst | 4 +- .../operators/batch.rst | 10 +- .../operators/bedrock.rst | 34 +- .../operators/cloudformation.rst | 8 +- .../operators/comprehend.rst | 8 +- .../operators/datasync.rst | 6 +- .../operators/dms.rst | 12 +- .../operators/dynamodb.rst | 4 +- .../operators/ec2.rst | 14 +- .../operators/ecs.rst | 20 +- .../operators/eks.rst | 26 +- .../operators/emr/emr.rst | 20 +- .../operators/emr/emr_eks.rst | 8 +- .../operators/emr/emr_serverless.rst | 12 +- .../operators/eventbridge.rst | 8 +- .../operators/glue.rst | 20 +- .../operators/glue_databrew.rst | 2 +- .../operators/kinesis_analytics.rst | 10 +- .../operators/lambda.rst | 6 +- .../operators/neptune.rst | 4 +- .../operators/opensearchserverless.rst | 2 +- .../operators/quicksight.rst | 4 +- .../operators/rds.rst | 28 +- .../operators/redshift/redshift_cluster.rst | 14 +- .../operators/redshift/redshift_data.rst | 4 +- .../operators/redshift/redshift_sql.rst | 2 +- .../operators/s3/glacier.rst | 6 +- .../operators/s3/s3.rst | 40 +- .../operators/sagemaker.rst | 46 +- .../operators/sns.rst | 2 +- .../operators/sqs.rst | 4 +- .../operators/step_functions.rst | 6 +- .../transfer/azure_blob_to_s3.rst | 2 +- .../transfer/dynamodb_to_s3.rst | 8 +- .../transfer/ftp_to_s3.rst | 2 +- .../transfer/gcs_to_s3.rst | 2 +- .../transfer/glacier_to_gcs.rst | 2 +- .../transfer/google_api_to_s3.rst | 6 +- .../transfer/hive_to_dynamodb.rst | 2 +- .../transfer/http_to_s3.rst | 2 +- .../transfer/imap_attachment_to_s3.rst | 2 +- .../transfer/local_to_s3.rst | 2 +- .../transfer/mongo_to_s3.rst | 2 +- .../transfer/redshift_to_s3.rst | 2 +- .../transfer/s3_to_dynamodb.rst | 4 +- .../transfer/s3_to_ftp.rst | 2 +- .../transfer/s3_to_redshift.rst | 4 +- .../transfer/s3_to_sftp.rst | 2 +- .../transfer/s3_to_sql.rst | 4 +- .../transfer/salesforce_to_s3.rst | 2 +- .../transfer/sftp_to_s3.rst | 2 +- .../transfer/sql_to_s3.rst | 4 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators.rst | 26 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators.rst | 2 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators.rst | 2 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators.rst | 2 +- .../changelog.rst | 2 +- .../index.rst | 2 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators.rst | 2 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- 
.../index.rst | 4 +- .../operators/index.rst | 4 +- .../sensors.rst | 4 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators.rst | 4 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators.rst | 2 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators.rst | 4 +- .../changelog.rst | 2 +- .../decorators/pyspark.rst | 2 +- .../index.rst | 4 +- .../operators.rst | 6 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../operators/index.rst | 8 +- .../changelog.rst | 2 +- docs/apache-airflow-providers-asana/index.rst | 4 +- .../operators/asana.rst | 2 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../kubernetes_executor.rst | 6 +- .../operators.rst | 20 +- .../changelog.rst | 2 +- .../apache-airflow-providers-cohere/index.rst | 2 +- .../operators/embedding.rst | 2 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators.rst | 2 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators.rst | 6 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators/copy_into.rst | 2 +- .../operators/jobs_create.rst | 6 +- .../operators/notebook.rst | 4 +- .../operators/repos_create.rst | 2 +- .../operators/repos_delete.rst | 2 +- .../operators/repos_update.rst | 2 +- .../operators/sql.rst | 16 +- .../operators/submit_run.rst | 4 +- .../operators/task.rst | 4 +- .../operators/workflow.rst | 2 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators.rst | 12 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators.rst | 8 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../decorators/docker.rst | 2 +- .../apache-airflow-providers-docker/index.rst | 4 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../connections/elasticsearch.rst | 2 +- .../hooks/elasticsearch_python_hook.rst | 2 +- .../hooks/elasticsearch_sql_hook.rst | 2 +- .../index.rst | 2 +- .../changelog.rst | 2 +- .../auth-manager/access-control.rst | 8 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- docs/apache-airflow-providers-ftp/index.rst | 6 +- .../operators/index.rst | 8 +- .../changelog.rst | 2 +- .../apache-airflow-providers-github/index.rst | 4 +- .../operators/index.rst | 8 +- .../changelog.rst | 2 +- .../example-dags.rst | 10 +- .../apache-airflow-providers-google/index.rst | 2 +- .../operators/ads.rst | 4 +- .../operators/cloud/automl.rst | 31 +- .../operators/cloud/bigquery.rst | 74 +- .../operators/cloud/bigquery_dts.rst | 10 +- .../operators/cloud/bigtable.rst | 14 +- .../operators/cloud/cloud_batch.rst | 12 +- .../operators/cloud/cloud_build.rst | 42 +- .../operators/cloud/cloud_composer.rst | 30 +- .../operators/cloud/cloud_memorystore.rst | 26 +- .../cloud/cloud_memorystore_memcached.rst | 14 +- .../operators/cloud/cloud_run.rst | 24 +- .../operators/cloud/cloud_sql.rst | 70 +- .../cloud/cloud_storage_transfer_service.rst | 50 +- .../operators/cloud/compute.rst | 66 +- .../operators/cloud/compute_ssh.rst | 4 +- .../operators/cloud/data_loss_prevention.rst | 22 +- .../operators/cloud/datacatalog.rst | 64 +- .../operators/cloud/dataflow.rst | 50 +- .../operators/cloud/dataform.rst | 34 +- .../operators/cloud/datafusion.rst | 26 +- .../operators/cloud/datapipeline.rst | 4 +- .../operators/cloud/dataplex.rst | 66 +- .../operators/cloud/dataprep.rst | 14 +- .../operators/cloud/dataproc.rst | 86 +- .../operators/cloud/dataproc_metastore.rst | 30 +- 
.../operators/cloud/datastore.rst | 26 +- .../operators/cloud/functions.rst | 16 +- .../operators/cloud/gcs.rst | 30 +- .../operators/cloud/index.rst | 2 +- .../operators/cloud/kubernetes_engine.rst | 38 +- .../operators/cloud/life_sciences.rst | 6 +- .../operators/cloud/looker.rst | 4 +- .../operators/cloud/mlengine.rst | 22 +- .../operators/cloud/natural_language.rst | 20 +- .../operators/cloud/pubsub.rst | 20 +- .../operators/cloud/spanner.rst | 26 +- .../operators/cloud/speech_to_text.rst | 8 +- .../operators/cloud/stackdriver.rst | 20 +- .../operators/cloud/tasks.rst | 26 +- .../operators/cloud/text_to_speech.rst | 8 +- .../operators/cloud/translate.rst | 6 +- .../operators/cloud/translate_speech.rst | 6 +- .../operators/cloud/vertex_ai.rst | 116 +- .../operators/cloud/video_intelligence.rst | 24 +- .../operators/cloud/vision.rst | 158 +- .../operators/cloud/workflows.rst | 22 +- .../operators/firebase/firestore.rst | 2 +- .../operators/leveldb/leveldb.rst | 2 +- .../marketing_platform/analytics_admin.rst | 14 +- .../marketing_platform/campaign_manager.rst | 14 +- .../marketing_platform/display_video.rst | 20 +- .../marketing_platform/search_ads.rst | 10 +- .../operators/suite/sheets.rst | 4 +- .../operators/transfer/azure_blob_to_gcs.rst | 2 +- .../transfer/azure_fileshare_to_gcs.rst | 2 +- .../transfer/bigquery_to_bigquery.rst | 2 +- .../operators/transfer/bigquery_to_gcs.rst | 2 +- .../operators/transfer/bigquery_to_mssql.rst | 2 +- .../operators/transfer/bigquery_to_mysql.rst | 2 +- .../transfer/bigquery_to_postgres.rst | 4 +- .../operators/transfer/calendar_to_gcs.rst | 2 +- .../transfer/facebook_ads_to_gcs.rst | 2 +- .../operators/transfer/gcs_to_bigquery.rst | 4 +- .../operators/transfer/gcs_to_gcs.rst | 20 +- .../operators/transfer/gcs_to_gdrive.rst | 8 +- .../operators/transfer/gcs_to_local.rst | 2 +- .../operators/transfer/gcs_to_sftp.rst | 8 +- .../operators/transfer/gcs_to_sheets.rst | 2 +- .../operators/transfer/gdrive_to_gcs.rst | 2 +- .../operators/transfer/gdrive_to_local.rst | 2 +- .../operators/transfer/local_to_drive.rst | 2 +- .../operators/transfer/local_to_gcs.rst | 2 +- .../operators/transfer/mssql_to_gcs.rst | 2 +- .../operators/transfer/mysql_to_gcs.rst | 2 +- .../operators/transfer/oracle_to_gcs.rst | 2 +- .../operators/transfer/postgres_to_gcs.rst | 2 +- .../operators/transfer/presto_to_gcs.rst | 10 +- .../operators/transfer/s3_to_gcs.rst | 4 +- .../operators/transfer/salesforce_to_gcs.rst | 2 +- .../operators/transfer/sftp_to_gcs.rst | 8 +- .../operators/transfer/sheets_to_gcs.rst | 2 +- .../operators/transfer/sql_to_sheets.rst | 2 +- .../operators/transfer/trino_to_gcs.rst | 10 +- .../sensors/google-cloud-tasks.rst | 2 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- docs/apache-airflow-providers-http/index.rst | 4 +- .../operators.rst | 18 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators/index.rst | 2 +- .../changelog.rst | 2 +- docs/apache-airflow-providers-jdbc/index.rst | 4 +- .../operators.rst | 4 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators/adf_run_pipeline.rst | 10 +- .../operators/adls.rst | 6 +- .../operators/asb.rst | 20 +- .../operators/azure_synapse.rst | 4 +- .../operators/batch.rst | 2 +- .../operators/msgraph.rst | 12 +- .../sensors/cosmos_document_sensor.rst | 2 +- .../sensors/msgraph.rst | 2 +- .../sensors/wasb_sensors.rst | 4 +- .../transfer/local_to_adls.rst | 2 +- .../transfer/local_to_wasb.rst | 2 +- 
.../transfer/s3_to_wasb.rst | 2 +- .../transfer/sftp_to_wasb.rst | 2 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators.rst | 12 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators.rst | 4 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- docs/apache-airflow-providers-mysql/index.rst | 4 +- .../operators.rst | 4 +- .../changelog.rst | 2 +- docs/apache-airflow-providers-neo4j/index.rst | 4 +- .../operators/neo4j.rst | 2 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../apache-airflow-providers-openai/index.rst | 2 +- .../operators/openai.rst | 4 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../guides/developer.rst | 2 +- .../changelog.rst | 2 +- .../index.rst | 2 +- .../operators/opensearch.rst | 6 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../notifications/opsgenie_notifier.rst | 2 +- .../operators/opsgenie_alert.rst | 6 +- .../changelog.rst | 2 +- .../operators/index.rst | 6 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators.rst | 6 +- .../changelog.rst | 2 +- .../index.rst | 2 +- .../operators/pgvector.rst | 2 +- .../changelog.rst | 2 +- .../index.rst | 2 +- .../operators/pinecone.rst | 6 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../postgres_operator_howto_guide.rst | 6 +- .../changelog.rst | 2 +- .../apache-airflow-providers-presto/index.rst | 4 +- .../operators/transfer/gcs_to_presto.rst | 2 +- .../changelog.rst | 2 +- .../apache-airflow-providers-qdrant/index.rst | 2 +- .../operators/qdrant.rst | 2 +- .../changelog.rst | 2 +- docs/apache-airflow-providers-redis/index.rst | 4 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators/bulk.rst | 8 +- .../operators/salesforce_apex_rest.rst | 2 +- .../changelog.rst | 2 +- docs/apache-airflow-providers-samba/index.rst | 2 +- .../transfer/gcs_to_samba.rst | 8 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- docs/apache-airflow-providers-sftp/index.rst | 2 +- .../sensors/sftp_sensor.rst | 6 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../changelog.rst | 2 +- docs/apache-airflow-providers-slack/index.rst | 4 +- .../operators/slack_api.rst | 8 +- .../operators/slack_webhook.rst | 4 +- .../operators/sql_to_slack.rst | 2 +- .../operators/sql_to_slack_webhook.rst | 2 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../decorators/snowpark.rst | 2 +- .../index.rst | 4 +- .../operators/copy_into_snowflake.rst | 2 +- .../operators/snowflake.rst | 4 +- .../operators/snowpark.rst | 2 +- .../changelog.rst | 2 +- .../apache-airflow-providers-sqlite/index.rst | 4 +- .../operators.rst | 4 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators.rst | 2 +- .../changelog.rst | 2 +- .../index.rst | 4 +- .../operators.rst | 2 +- .../changelog.rst | 2 +- .../index.rst | 2 +- .../operators/azure_blob_to_teradata.rst | 14 +- .../operators/compute_cluster.rst | 8 +- .../operators/s3_to_teradata.rst | 8 +- .../operators/teradata.rst | 26 +- .../operators/teradata_to_teradata.rst | 4 +- .../changelog.rst | 2 +- docs/apache-airflow-providers-trino/index.rst | 4 +- .../operators/transfer/gcs_to_trino.rst | 2 +- .../operators/trino.rst | 2 +- .../changelog.rst | 2 +- .../changelog.rst | 2 +- .../index.rst | 2 +- .../operators/weaviate.rst | 8 +- .../changelog.rst | 2 +- .../apache-airflow-providers-yandex/index.rst | 4 +- .../operators/dataproc.rst | 2 +- .../operators/yq.rst | 2 +- .../changelog.rst | 2 +- docs/apache-airflow-providers-ydb/index.rst | 4 
+- .../operators/ydb_operator_howto_guide.rst | 4 +- .../changelog.rst | 2 +- .../index.rst | 4 +- docs/apache-airflow/tutorial/taskflow.rst | 4 +- docs/build_docs.py | 5 +- docs/conf.py | 101 +- docs/exts/docs_build/code_utils.py | 1 - docs/exts/exampleinclude.py | 13 +- docs/exts/provider_init_hack.py | 56 - docs/exts/provider_yaml_utils.py | 16 +- docs/exts/providers_extensions.py | 27 +- generated/provider_dependencies.json | 7 +- providers/.gitignore | 7 + {tests/test_utils => providers}/__init__.py | 13 +- providers/pyproject.toml | 98 ++ providers/src/airflow/providers/.gitignore | 1 + .../MANAGING_PROVIDERS_LIFECYCLE.rst | 12 +- .../airbyte/.latest-doc-only-change.txt | 0 .../airflow}/providers/airbyte/CHANGELOG.rst | 0 .../airflow}/providers/airbyte/__init__.py | 0 .../providers/airbyte/hooks/__init__.py | 0 .../providers/airbyte/hooks/airbyte.py | 0 .../providers/airbyte/operators/__init__.py | 0 .../providers/airbyte/operators/airbyte.py | 0 .../airflow}/providers/airbyte/provider.yaml | 0 .../providers/airbyte/sensors}/__init__.py | 0 .../providers/airbyte/sensors/airbyte.py | 0 .../providers/airbyte/triggers}/__init__.py | 0 .../providers/airbyte/triggers/airbyte.py | 0 .../alibaba/.latest-doc-only-change.txt | 0 .../airflow}/providers/alibaba/CHANGELOG.rst | 0 .../airflow}/providers/alibaba/__init__.py | 0 .../providers/alibaba/cloud/__init__.py | 0 .../providers/alibaba/cloud/hooks/__init__.py | 0 .../alibaba/cloud/hooks/analyticdb_spark.py | 0 .../providers/alibaba/cloud/hooks/oss.py | 0 .../providers/alibaba/cloud/log}/__init__.py | 0 .../alibaba/cloud/log/oss_task_handler.py | 0 .../alibaba/cloud/operators}/__init__.py | 0 .../cloud/operators/analyticdb_spark.py | 0 .../providers/alibaba/cloud/operators/oss.py | 0 .../alibaba/cloud/sensors}/__init__.py | 0 .../alibaba/cloud/sensors/analyticdb_spark.py | 0 .../alibaba/cloud/sensors/oss_key.py | 0 .../airflow}/providers/alibaba/provider.yaml | 0 .../amazon/.latest-doc-only-change.txt | 0 .../airflow}/providers/amazon/CHANGELOG.rst | 0 .../src/airflow}/providers/amazon/__init__.py | 0 .../airflow/providers/amazon/aws}/__init__.py | 0 .../providers/amazon/aws/assets}/__init__.py | 0 .../providers/amazon/aws/assets/s3.py | 0 .../amazon/aws/auth_manager}/__init__.py | 0 .../amazon/aws/auth_manager/avp}/__init__.py | 0 .../amazon/aws/auth_manager/avp/entities.py | 0 .../amazon/aws/auth_manager/avp/facade.py | 0 .../amazon/aws/auth_manager/avp/schema.json | 0 .../aws/auth_manager/aws_auth_manager.py | 0 .../amazon/aws/auth_manager/cli}/__init__.py | 0 .../aws/auth_manager/cli/avp_commands.py | 0 .../amazon/aws/auth_manager/cli/definition.py | 0 .../amazon/aws/auth_manager/constants.py | 0 .../security_manager}/__init__.py | 0 .../aws_security_manager_override.py | 0 .../providers/amazon/aws/auth_manager/user.py | 0 .../aws/auth_manager/views}/__init__.py | 0 .../amazon/aws/auth_manager/views/auth.py | 0 .../providers/amazon/aws/exceptions.py | 0 .../providers/amazon/aws/executors/Dockerfile | 0 .../amazon/aws/executors}/__init__.py | 0 .../amazon/aws/executors/batch/__init__.py | 0 .../aws/executors/batch/batch_executor.py | 0 .../executors/batch/batch_executor_config.py | 0 .../amazon/aws/executors/batch/boto_schema.py | 0 .../amazon/aws/executors/batch/utils.py | 0 .../amazon/aws/executors/ecs/__init__.py | 0 .../amazon/aws/executors/ecs/boto_schema.py | 0 .../amazon/aws/executors/ecs/ecs_executor.py | 0 .../aws/executors/ecs/ecs_executor_config.py | 0 .../amazon/aws/executors/ecs/utils.py | 0 
.../amazon/aws/executors/utils}/__init__.py | 0 .../aws/executors/utils/base_config_keys.py | 0 .../utils/exponential_backoff_retry.py | 0 .../providers/amazon/aws/fs}/__init__.py | 0 .../airflow}/providers/amazon/aws/fs/s3.py | 0 .../providers/amazon/aws/hooks}/__init__.py | 0 .../providers/amazon/aws/hooks/appflow.py | 0 .../providers/amazon/aws/hooks/athena.py | 0 .../providers/amazon/aws/hooks/athena_sql.py | 0 .../providers/amazon/aws/hooks/base_aws.py | 0 .../amazon/aws/hooks/batch_client.py | 0 .../amazon/aws/hooks/batch_waiters.json | 0 .../amazon/aws/hooks/batch_waiters.py | 0 .../providers/amazon/aws/hooks/bedrock.py | 0 .../providers/amazon/aws/hooks/chime.py | 0 .../amazon/aws/hooks/cloud_formation.py | 0 .../providers/amazon/aws/hooks/comprehend.py | 0 .../providers/amazon/aws/hooks/datasync.py | 0 .../providers/amazon/aws/hooks/dms.py | 0 .../providers/amazon/aws/hooks/dynamodb.py | 0 .../providers/amazon/aws/hooks/ec2.py | 0 .../providers/amazon/aws/hooks/ecr.py | 0 .../providers/amazon/aws/hooks/ecs.py | 0 .../providers/amazon/aws/hooks/eks.py | 0 .../hooks/elasticache_replication_group.py | 0 .../providers/amazon/aws/hooks/emr.py | 0 .../providers/amazon/aws/hooks/eventbridge.py | 0 .../providers/amazon/aws/hooks/glacier.py | 0 .../providers/amazon/aws/hooks/glue.py | 0 .../amazon/aws/hooks/glue_catalog.py | 0 .../amazon/aws/hooks/glue_crawler.py | 0 .../amazon/aws/hooks/glue_databrew.py | 0 .../providers/amazon/aws/hooks/kinesis.py | 0 .../amazon/aws/hooks/kinesis_analytics.py | 0 .../amazon/aws/hooks/lambda_function.py | 0 .../providers/amazon/aws/hooks/logs.py | 0 .../providers/amazon/aws/hooks/neptune.py | 0 .../amazon/aws/hooks/opensearch_serverless.py | 0 .../providers/amazon/aws/hooks/quicksight.py | 0 .../providers/amazon/aws/hooks/rds.py | 0 .../amazon/aws/hooks/redshift_cluster.py | 0 .../amazon/aws/hooks/redshift_data.py | 0 .../amazon/aws/hooks/redshift_sql.py | 0 .../airflow}/providers/amazon/aws/hooks/s3.py | 0 .../providers/amazon/aws/hooks/sagemaker.py | 0 .../amazon/aws/hooks/secrets_manager.py | 0 .../providers/amazon/aws/hooks/ses.py | 0 .../providers/amazon/aws/hooks/sns.py | 0 .../providers/amazon/aws/hooks/sqs.py | 0 .../providers/amazon/aws/hooks/ssm.py | 0 .../amazon/aws/hooks/step_function.py | 0 .../providers/amazon/aws/hooks/sts.py | 0 .../amazon/aws/hooks/verified_permissions.py | 0 .../providers/amazon/aws/links}/__init__.py | 0 .../providers/amazon/aws/links/athena.py | 0 .../providers/amazon/aws/links/base_aws.py | 0 .../providers/amazon/aws/links/batch.py | 0 .../providers/amazon/aws/links/emr.py | 0 .../providers/amazon/aws/links/glue.py | 0 .../providers/amazon/aws/links/logs.py | 0 .../amazon/aws/links/step_function.py | 0 .../providers/amazon/aws/log}/__init__.py | 0 .../amazon/aws/log/cloudwatch_task_handler.py | 0 .../amazon/aws/log/s3_task_handler.py | 0 .../amazon/aws/notifications}/__init__.py | 0 .../amazon/aws/notifications/chime.py | 0 .../providers/amazon/aws/notifications/sns.py | 0 .../providers/amazon/aws/notifications/sqs.py | 0 .../amazon/aws/operators}/__init__.py | 0 .../providers/amazon/aws/operators/appflow.py | 0 .../providers/amazon/aws/operators/athena.py | 0 .../amazon/aws/operators/base_aws.py | 0 .../providers/amazon/aws/operators/batch.py | 0 .../providers/amazon/aws/operators/bedrock.py | 0 .../amazon/aws/operators/cloud_formation.py | 0 .../amazon/aws/operators/comprehend.py | 0 .../amazon/aws/operators/datasync.py | 0 .../providers/amazon/aws/operators/dms.py | 0 .../providers/amazon/aws/operators/ec2.py | 0 
.../providers/amazon/aws/operators/ecs.py | 0 .../providers/amazon/aws/operators/eks.py | 0 .../providers/amazon/aws/operators/emr.py | 0 .../amazon/aws/operators/eventbridge.py | 0 .../providers/amazon/aws/operators/glacier.py | 0 .../providers/amazon/aws/operators/glue.py | 0 .../amazon/aws/operators/glue_crawler.py | 0 .../amazon/aws/operators/glue_databrew.py | 0 .../amazon/aws/operators/kinesis_analytics.py | 0 .../amazon/aws/operators/lambda_function.py | 0 .../providers/amazon/aws/operators/neptune.py | 0 .../amazon/aws/operators/quicksight.py | 0 .../providers/amazon/aws/operators/rds.py | 0 .../amazon/aws/operators/redshift_cluster.py | 0 .../amazon/aws/operators/redshift_data.py | 0 .../providers/amazon/aws/operators/s3.py | 0 .../amazon/aws/operators/sagemaker.py | 0 .../providers/amazon/aws/operators/sns.py | 0 .../providers/amazon/aws/operators/sqs.py | 0 .../amazon/aws/operators/step_function.py | 0 .../providers/amazon/aws/secrets}/__init__.py | 0 .../amazon/aws/secrets/secrets_manager.py | 0 .../amazon/aws/secrets/systems_manager.py | 0 .../providers/amazon/aws/sensors}/__init__.py | 0 .../providers/amazon/aws/sensors/athena.py | 0 .../providers/amazon/aws/sensors/base_aws.py | 0 .../providers/amazon/aws/sensors/batch.py | 0 .../providers/amazon/aws/sensors/bedrock.py | 0 .../amazon/aws/sensors/cloud_formation.py | 0 .../amazon/aws/sensors/comprehend.py | 0 .../providers/amazon/aws/sensors/dms.py | 0 .../providers/amazon/aws/sensors/dynamodb.py | 0 .../providers/amazon/aws/sensors/ec2.py | 0 .../providers/amazon/aws/sensors/ecs.py | 0 .../providers/amazon/aws/sensors/eks.py | 0 .../providers/amazon/aws/sensors/emr.py | 0 .../providers/amazon/aws/sensors/glacier.py | 0 .../providers/amazon/aws/sensors/glue.py | 0 .../aws/sensors/glue_catalog_partition.py | 0 .../amazon/aws/sensors/glue_crawler.py | 0 .../amazon/aws/sensors/kinesis_analytics.py | 0 .../amazon/aws/sensors/lambda_function.py | 0 .../aws/sensors/opensearch_serverless.py | 0 .../amazon/aws/sensors/quicksight.py | 0 .../providers/amazon/aws/sensors/rds.py | 0 .../amazon/aws/sensors/redshift_cluster.py | 0 .../providers/amazon/aws/sensors/s3.py | 0 .../providers/amazon/aws/sensors/sagemaker.py | 0 .../providers/amazon/aws/sensors/sqs.py | 0 .../amazon/aws/sensors/step_function.py | 0 .../amazon/aws/transfers}/__init__.py | 0 .../amazon/aws/transfers/azure_blob_to_s3.py | 0 .../providers/amazon/aws/transfers/base.py | 0 .../amazon/aws/transfers/dynamodb_to_s3.py | 0 .../amazon/aws/transfers/exasol_to_s3.py | 0 .../amazon/aws/transfers/ftp_to_s3.py | 0 .../amazon/aws/transfers/gcs_to_s3.py | 0 .../amazon/aws/transfers/glacier_to_gcs.py | 0 .../amazon/aws/transfers/google_api_to_s3.py | 0 .../amazon/aws/transfers/hive_to_dynamodb.py | 0 .../amazon/aws/transfers/http_to_s3.py | 0 .../aws/transfers/imap_attachment_to_s3.py | 0 .../amazon/aws/transfers/local_to_s3.py | 0 .../amazon/aws/transfers/mongo_to_s3.py | 0 .../amazon/aws/transfers/redshift_to_s3.py | 0 .../amazon/aws/transfers/s3_to_dynamodb.py | 0 .../amazon/aws/transfers/s3_to_ftp.py | 0 .../amazon/aws/transfers/s3_to_redshift.py | 0 .../amazon/aws/transfers/s3_to_sftp.py | 0 .../amazon/aws/transfers/s3_to_sql.py | 0 .../amazon/aws/transfers/salesforce_to_s3.py | 0 .../amazon/aws/transfers/sftp_to_s3.py | 0 .../amazon/aws/transfers/sql_to_s3.py | 0 .../providers/amazon/aws/triggers/README.md | 0 .../providers/amazon/aws/triggers/__init__.py | 0 .../providers/amazon/aws/triggers/athena.py | 0 .../providers/amazon/aws/triggers/base.py | 0 
.../providers/amazon/aws/triggers/batch.py | 0 .../providers/amazon/aws/triggers/bedrock.py | 0 .../amazon/aws/triggers/comprehend.py | 0 .../providers/amazon/aws/triggers/ec2.py | 0 .../providers/amazon/aws/triggers/ecs.py | 12 +- .../providers/amazon/aws/triggers/eks.py | 0 .../providers/amazon/aws/triggers/emr.py | 0 .../providers/amazon/aws/triggers/glue.py | 0 .../amazon/aws/triggers/glue_crawler.py | 0 .../amazon/aws/triggers/glue_databrew.py | 0 .../amazon/aws/triggers/kinesis_analytics.py | 0 .../amazon/aws/triggers/lambda_function.py | 0 .../providers/amazon/aws/triggers/neptune.py | 0 .../aws/triggers/opensearch_serverless.py | 0 .../providers/amazon/aws/triggers/rds.py | 0 .../amazon/aws/triggers/redshift_cluster.py | 0 .../amazon/aws/triggers/redshift_data.py | 0 .../providers/amazon/aws/triggers/s3.py | 0 .../amazon/aws/triggers/sagemaker.py | 0 .../providers/amazon/aws/triggers/sqs.py | 0 .../amazon/aws/triggers/step_function.py | 0 .../providers/amazon/aws/utils/__init__.py | 0 .../utils/asset_compat_lineage_collector.py | 0 .../amazon/aws/utils/connection_wrapper.py | 0 .../amazon/aws/utils/eks_get_token.py | 0 .../providers/amazon/aws/utils/emailer.py | 0 .../providers/amazon/aws/utils/identifiers.py | 0 .../providers/amazon/aws/utils/mixins.py | 0 .../providers/amazon/aws/utils/openlineage.py | 0 .../providers/amazon/aws/utils/rds.py | 0 .../providers/amazon/aws/utils/redshift.py | 0 .../providers/amazon/aws/utils/sagemaker.py | 0 .../providers/amazon/aws/utils/sqs.py | 0 .../providers/amazon/aws/utils/suppress.py | 0 .../providers/amazon/aws/utils/tags.py | 0 .../amazon/aws/utils/task_log_fetcher.py | 0 .../providers/amazon/aws/utils/waiter.py | 0 .../amazon/aws/utils/waiter_with_logging.py | 0 .../providers/amazon/aws/waiters/README.md | 0 .../providers/amazon/aws/waiters}/__init__.py | 0 .../providers/amazon/aws/waiters/appflow.json | 0 .../providers/amazon/aws/waiters/athena.json | 0 .../amazon/aws/waiters/base_waiter.py | 0 .../providers/amazon/aws/waiters/batch.json | 0 .../amazon/aws/waiters/bedrock-agent.json | 0 .../providers/amazon/aws/waiters/bedrock.json | 0 .../amazon/aws/waiters/comprehend.json | 0 .../amazon/aws/waiters/databrew.json | 0 .../amazon/aws/waiters/dynamodb.json | 0 .../providers/amazon/aws/waiters/ecs.json | 0 .../providers/amazon/aws/waiters/eks.json | 0 .../amazon/aws/waiters/emr-containers.json | 0 .../amazon/aws/waiters/emr-serverless.json | 0 .../providers/amazon/aws/waiters/emr.json | 0 .../providers/amazon/aws/waiters/glue.json | 0 .../aws/waiters/kinesisanalyticsv2.json | 0 .../providers/amazon/aws/waiters/neptune.json | 0 .../aws/waiters/opensearchserverless.json | 0 .../providers/amazon/aws/waiters/rds.json | 0 .../amazon/aws/waiters/redshift.json | 0 .../amazon/aws/waiters/sagemaker.json | 0 .../amazon/aws/waiters/stepfunctions.json | 0 .../airflow}/providers/amazon/provider.yaml | 0 .../apache/beam/.latest-doc-only-change.txt | 0 .../providers/apache/beam/CHANGELOG.rst | 0 .../airflow}/providers/apache/beam/README.md | 0 .../providers/apache/beam/__init__.py | 0 .../providers/apache/beam/hooks/__init__.py | 0 .../providers/apache/beam/hooks/beam.py | 0 .../apache/beam/operators/__init__.py | 0 .../providers/apache/beam/operators/beam.py | 0 .../providers/apache/beam/provider.yaml | 0 .../apache/beam/triggers}/__init__.py | 0 .../providers/apache/beam/triggers/beam.py | 0 .../cassandra/.latest-doc-only-change.txt | 0 .../providers/apache/cassandra/CHANGELOG.rst | 0 .../providers/apache/cassandra/__init__.py | 0 
.../apache/cassandra/hooks}/__init__.py | 0 .../apache/cassandra/hooks/cassandra.py | 0 .../providers/apache/cassandra/provider.yaml | 0 .../apache/cassandra/sensors}/__init__.py | 0 .../apache/cassandra/sensors/record.py | 0 .../apache/cassandra/sensors/table.py | 0 .../apache/drill/.latest-doc-only-change.txt | 0 .../providers/apache/drill/CHANGELOG.rst | 0 .../providers/apache/drill/__init__.py | 0 .../providers/apache/drill/hooks/__init__.py | 0 .../providers/apache/drill/hooks/drill.py | 0 .../apache/drill/operators/__init__.py | 0 .../providers/apache/drill/operators/drill.py | 0 .../providers/apache/drill/provider.yaml | 0 .../apache/druid/.latest-doc-only-change.txt | 0 .../providers/apache/druid/CHANGELOG.rst | 0 .../providers/apache/druid/__init__.py | 0 .../providers/apache/druid/hooks/__init__.py | 0 .../providers/apache/druid/hooks/druid.py | 0 .../apache/druid/operators/__init__.py | 0 .../providers/apache/druid/operators/druid.py | 0 .../apache/druid/operators/druid_check.py | 0 .../providers/apache/druid/provider.yaml | 0 .../apache/druid}/transfers/__init__.py | 0 .../apache/druid/transfers/hive_to_druid.py | 0 .../apache/flink/.latest-doc-only-change.txt | 0 .../providers/apache/flink/CHANGELOG.rst | 0 .../providers/apache/flink/__init__.py | 0 .../providers/apache/flink/hooks/__init__.py | 0 .../apache/flink/operators/__init__.py | 0 .../flink/operators/flink_kubernetes.py | 0 .../providers/apache/flink/provider.yaml | 0 .../apache/flink/sensors/__init__.py | 0 .../apache/flink/sensors/flink_kubernetes.py | 0 .../apache/hdfs/.latest-doc-only-change.txt | 0 .../providers/apache/hdfs/CHANGELOG.rst | 0 .../providers/apache/hdfs/__init__.py | 0 .../providers/apache/hdfs/hooks/__init__.py | 0 .../providers/apache/hdfs/hooks/hdfs.py | 0 .../providers/apache/hdfs/hooks/webhdfs.py | 0 .../providers/apache/hdfs/log}/__init__.py | 0 .../apache/hdfs/log/hdfs_task_handler.py | 0 .../providers/apache/hdfs/provider.yaml | 0 .../providers/apache/hdfs/sensors/__init__.py | 0 .../providers/apache/hdfs/sensors/hdfs.py | 0 .../providers/apache/hdfs/sensors/web_hdfs.py | 0 .../apache/hive/.latest-doc-only-change.txt | 0 .../providers/apache/hive/CHANGELOG.rst | 0 .../providers/apache/hive/__init__.py | 0 .../providers/apache/hive/hooks/__init__.py | 0 .../providers/apache/hive/hooks/hive.py | 10 +- .../providers/apache/hive/macros/__init__.py | 0 .../providers/apache/hive/macros/hive.py | 0 .../apache/hive/operators/__init__.py | 0 .../providers/apache/hive/operators/hive.py | 0 .../apache/hive/operators/hive_stats.py | 0 .../providers/apache/hive/plugins/__init__.py | 0 .../providers/apache/hive/plugins/hive.py | 0 .../providers/apache/hive/provider.yaml | 0 .../providers/apache/hive/sensors/__init__.py | 0 .../apache/hive/sensors/hive_partition.py | 0 .../hive/sensors/metastore_partition.py | 0 .../hive/sensors/named_hive_partition.py | 0 .../apache/hive/transfers}/__init__.py | 0 .../apache/hive/transfers/hive_to_mysql.py | 0 .../apache/hive/transfers/hive_to_samba.py | 0 .../apache/hive/transfers/mssql_to_hive.py | 0 .../apache/hive/transfers/mysql_to_hive.py | 0 .../apache/hive/transfers/s3_to_hive.py | 10 +- .../apache/hive/transfers/vertica_to_hive.py | 0 .../iceberg/.latest-doc-only-change.txt | 0 .../providers/apache/iceberg/CHANGELOG.rst | 0 .../providers/apache/iceberg/__init__.py | 0 .../apache/iceberg/hooks/__init__.py | 0 .../providers/apache/iceberg/hooks/iceberg.py | 0 .../providers/apache/iceberg/provider.yaml | 0 .../apache/impala/.latest-doc-only-change.txt | 0 
.../providers/apache/impala/CHANGELOG.rst | 0 .../providers/apache/impala/__init__.py | 0 .../providers/apache/impala/hooks/__init__.py | 0 .../providers/apache/impala/hooks/impala.py | 0 .../providers/apache/impala/provider.yaml | 0 .../apache/kafka/.latest-doc-only-change.txt | 0 .../providers/apache/kafka/CHANGELOG.rst | 0 .../providers/apache/kafka/__init__.py | 0 .../providers/apache/kafka/hooks}/__init__.py | 0 .../providers/apache/kafka/hooks/base.py | 0 .../providers/apache/kafka/hooks/client.py | 0 .../providers/apache/kafka/hooks/consume.py | 0 .../providers/apache/kafka/hooks/produce.py | 0 .../apache/kafka/operators}/__init__.py | 0 .../apache/kafka/operators/consume.py | 0 .../apache/kafka/operators/produce.py | 0 .../providers/apache/kafka/provider.yaml | 0 .../apache/kafka/sensors}/__init__.py | 0 .../providers/apache/kafka/sensors/kafka.py | 0 .../apache/kafka/triggers}/__init__.py | 0 .../apache/kafka/triggers/await_message.py | 0 .../apache/kylin/.latest-doc-only-change.txt | 0 .../providers/apache/kylin/CHANGELOG.rst | 0 .../providers/apache/kylin/__init__.py | 0 .../providers/apache/kylin}/hooks/__init__.py | 0 .../providers/apache/kylin/hooks/kylin.py | 0 .../apache/kylin}/operators/__init__.py | 0 .../apache/kylin/operators/kylin_cube.py | 0 .../providers/apache/kylin/provider.yaml | 0 .../apache/livy/.latest-doc-only-change.txt | 0 .../providers/apache/livy/CHANGELOG.rst | 0 .../providers/apache/livy/__init__.py | 0 .../providers/apache/livy/hooks}/__init__.py | 0 .../providers/apache/livy/hooks/livy.py | 0 .../apache/livy/operators}/__init__.py | 0 .../providers/apache/livy/operators/livy.py | 0 .../providers/apache/livy/provider.yaml | 0 .../apache/livy/sensors}/__init__.py | 0 .../providers/apache/livy/sensors/livy.py | 0 .../apache/livy/triggers}/__init__.py | 0 .../providers/apache/livy/triggers/livy.py | 0 .../apache/pig/.latest-doc-only-change.txt | 0 .../providers/apache/pig/CHANGELOG.rst | 0 .../airflow}/providers/apache/pig/__init__.py | 0 .../providers/apache/pig/hooks/__init__.py | 0 .../providers/apache/pig/hooks/pig.py | 0 .../apache/pig/operators/__init__.py | 0 .../providers/apache/pig/operators/pig.py | 0 .../providers/apache/pig/provider.yaml | 0 .../apache/pinot/.latest-doc-only-change.txt | 0 .../providers/apache/pinot/CHANGELOG.rst | 0 .../providers/apache/pinot/__init__.py | 0 .../providers/apache/pinot/hooks/__init__.py | 0 .../providers/apache/pinot/hooks/pinot.py | 0 .../providers/apache/pinot/provider.yaml | 0 .../apache/spark/.latest-doc-only-change.txt | 0 .../providers/apache/spark/CHANGELOG.rst | 0 .../providers/apache/spark/__init__.py | 0 .../apache/spark/decorators/__init__.py | 0 .../apache/spark/decorators/pyspark.py | 0 .../providers/apache/spark/hooks/__init__.py | 0 .../apache/spark/hooks/spark_connect.py | 0 .../apache/spark/hooks/spark_jdbc.py | 0 .../apache/spark/hooks/spark_jdbc_script.py | 0 .../providers/apache/spark/hooks/spark_sql.py | 0 .../apache/spark/hooks/spark_submit.py | 0 .../apache/spark/operators/__init__.py | 0 .../apache/spark/operators/spark_jdbc.py | 0 .../apache/spark/operators/spark_sql.py | 0 .../apache/spark/operators/spark_submit.py | 0 .../providers/apache/spark/provider.yaml | 0 .../apprise/.latest-doc-only-change.txt | 0 .../airflow}/providers/apprise/CHANGELOG.rst | 0 .../airflow}/providers/apprise/__init__.py | 0 .../providers/apprise/hooks/__init__.py | 0 .../providers/apprise/hooks/apprise.py | 0 .../apprise/notifications/__init__.py | 0 .../apprise/notifications/apprise.py | 0 
.../airflow}/providers/apprise/provider.yaml | 0 .../arangodb/.latest-doc-only-change.txt | 0 .../airflow}/providers/arangodb/CHANGELOG.rst | 0 .../airflow}/providers/arangodb/__init__.py | 0 .../arangodb/example_dags}/__init__.py | 0 .../arangodb/example_dags/example_arangodb.py | 0 .../providers/arangodb/hooks}/__init__.py | 0 .../providers/arangodb/hooks/arangodb.py | 0 .../providers/arangodb/operators}/__init__.py | 0 .../providers/arangodb/operators/arangodb.py | 0 .../airflow}/providers/arangodb/provider.yaml | 0 .../providers/arangodb/sensors}/__init__.py | 0 .../providers/arangodb/sensors/arangodb.py | 0 .../asana/.latest-doc-only-change.txt | 0 .../airflow}/providers/asana/CHANGELOG.rst | 0 .../src/airflow}/providers/asana/README.md | 0 .../src/airflow}/providers/asana/__init__.py | 0 .../providers/asana/hooks/__init__.py | 0 .../airflow}/providers/asana/hooks/asana.py | 0 .../providers/asana/operators/__init__.py | 0 .../providers/asana/operators/asana_tasks.py | 0 .../airflow}/providers/asana/provider.yaml | 0 .../jira/.latest-doc-only-change.txt | 0 .../providers/atlassian/jira/CHANGELOG.rst | 0 .../providers/atlassian/jira/__init__.py | 0 .../atlassian/jira/hooks}/__init__.py | 0 .../providers/atlassian/jira/hooks/jira.py | 0 .../atlassian/jira/notifications}/__init__.py | 0 .../atlassian/jira/notifications/jira.py | 0 .../atlassian/jira/operators}/__init__.py | 0 .../atlassian/jira/operators/jira.py | 0 .../providers/atlassian/jira/provider.yaml | 0 .../atlassian/jira/sensors}/__init__.py | 0 .../providers/atlassian/jira/sensors/jira.py | 0 .../celery/.latest-doc-only-change.txt | 0 .../airflow}/providers/celery/CHANGELOG.rst | 0 .../src/airflow}/providers/celery/__init__.py | 0 .../airflow/providers/celery/cli}/__init__.py | 0 .../providers/celery/cli/celery_command.py | 0 .../providers/celery/executors/__init__.py | 0 .../celery/executors/celery_executor.py | 0 .../celery/executors/celery_executor_utils.py | 0 .../executors/celery_kubernetes_executor.py | 0 .../celery/executors/default_celery.py | 0 .../airflow}/providers/celery/provider.yaml | 0 .../providers/celery/sensors/__init__.py | 0 .../providers/celery/sensors/celery_queue.py | 0 .../cloudant/.latest-doc-only-change.txt | 0 .../airflow}/providers/cloudant/CHANGELOG.rst | 0 .../airflow}/providers/cloudant/__init__.py | 0 .../providers/cloudant/cloudant_fake.py | 0 .../providers/cloudant/hooks/__init__.py | 0 .../providers/cloudant/hooks/cloudant.py | 0 .../airflow}/providers/cloudant/provider.yaml | 0 .../kubernetes/.latest-doc-only-change.txt | 0 .../providers/cncf/kubernetes/CHANGELOG.rst | 0 .../providers/cncf/kubernetes/__init__.py | 0 .../cncf/kubernetes/backcompat}/__init__.py | 0 .../backcompat/backwards_compat_converters.py | 0 .../providers/cncf/kubernetes/callbacks.py | 0 .../cncf/kubernetes/cli}/__init__.py | 0 .../cncf/kubernetes/cli/kubernetes_command.py | 0 .../cncf/kubernetes/decorators}/__init__.py | 0 .../cncf/kubernetes/decorators/kubernetes.py | 0 .../cncf/kubernetes/executors}/__init__.py | 0 .../executors/kubernetes_executor.py | 0 .../executors/kubernetes_executor_types.py | 0 .../executors/kubernetes_executor_utils.py | 0 .../executors/local_kubernetes_executor.py | 0 .../cncf/kubernetes/hooks}/__init__.py | 0 .../cncf/kubernetes/hooks/kubernetes.py | 0 .../providers/cncf/kubernetes/k8s_model.py | 0 .../providers/cncf/kubernetes/kube_client.py | 0 .../providers/cncf/kubernetes/kube_config.py | 0 .../__init__.py | 0 .../basic_template.yaml | 0 .../kubernetes/kubernetes_helper_functions.py | 0 
.../cncf/kubernetes/operators}/__init__.py | 0 .../operators/custom_object_launcher.py | 0 .../cncf/kubernetes/operators/job.py | 0 .../kubernetes/operators/kubernetes_pod.py | 0 .../cncf/kubernetes/operators/pod.py | 0 .../cncf/kubernetes/operators/resource.py | 0 .../kubernetes/operators/spark_kubernetes.py | 0 .../cncf/kubernetes/pod_generator.py | 0 .../kubernetes/pod_generator_deprecated.py | 0 .../kubernetes/pod_launcher_deprecated.py | 0 .../pod_template_file_examples}/__init__.py | 0 .../dags_in_image_template.yaml | 0 .../dags_in_volume_template.yaml | 0 .../git_sync_template.yaml | 0 .../providers/cncf/kubernetes/provider.yaml | 0 .../python_kubernetes_script.jinja2 | 0 .../kubernetes/python_kubernetes_script.py | 0 .../kubernetes/resource_convert}/__init__.py | 0 .../kubernetes/resource_convert/configmap.py | 0 .../resource_convert/env_variable.py | 0 .../kubernetes/resource_convert/secret.py | 0 .../providers/cncf/kubernetes/secret.py | 0 .../cncf/kubernetes/sensors}/__init__.py | 0 .../kubernetes/sensors/spark_kubernetes.py | 0 .../cncf/kubernetes/template_rendering.py | 0 .../cncf/kubernetes/triggers}/__init__.py | 0 .../providers/cncf/kubernetes/triggers/job.py | 0 .../kubernetes/triggers/kubernetes_pod.py | 0 .../providers/cncf/kubernetes/triggers/pod.py | 0 .../cncf/kubernetes/utils/__init__.py | 0 .../cncf/kubernetes/utils/delete_from.py | 0 .../kubernetes/utils/k8s_resource_iterator.py | 0 .../cncf/kubernetes/utils/pod_manager.py | 0 .../cncf/kubernetes/utils/xcom_sidecar.py | 0 .../cohere/.latest-doc-only-change.txt | 0 .../airflow}/providers/cohere/CHANGELOG.rst | 0 .../src/airflow}/providers/cohere/__init__.py | 0 .../providers/cohere/hooks/__init__.py | 0 .../airflow}/providers/cohere/hooks/cohere.py | 0 .../providers/cohere/operators}/__init__.py | 0 .../providers/cohere/operators/embedding.py | 0 .../airflow}/providers/cohere/provider.yaml | 0 .../common/compat/.latest-doc-only-change.txt | 0 .../providers/common/compat/CHANGELOG.rst | 0 .../providers/common/compat/__init__.py | 0 .../common/compat/assets/__init__.py | 0 .../common/compat/lineage/__init__.py | 0 .../providers/common/compat/lineage/hook.py | 0 .../common/compat/openlineage/__init__.py | 0 .../common/compat/openlineage/facet.py | 0 .../compat/openlineage/utils/__init__.py | 0 .../common/compat/openlineage/utils/utils.py | 0 .../providers/common/compat/provider.yaml | 0 .../common/compat/security/__init__.py | 0 .../common/compat/security/permissions.py | 0 .../common/io/.latest-doc-only-change.txt | 0 .../providers/common/io/CHANGELOG.rst | 0 .../airflow}/providers/common/io/__init__.py | 0 .../providers/common/io/assets/__init__.py | 0 .../common/io/assets/assets}/__init__.py | 0 .../providers/common/io/assets/file.py | 0 .../common/io/operators}/__init__.py | 0 .../common/io/operators/file_transfer.py | 0 .../providers/common/io/provider.yaml | 0 .../providers/common/io/xcom/__init__.py | 0 .../providers/common/io/xcom/backend.py | 0 .../common/sql/.latest-doc-only-change.txt | 0 .../providers/common/sql/CHANGELOG.rst | 0 .../providers/common/sql/README_API.md | 0 .../airflow}/providers/common/sql/__init__.py | 0 .../adr/0001-record-architecture-decisions.md | 0 ...-structure-from-dbapihook-derived-hooks.md | 0 .../providers/common/sql/hooks}/__init__.py | 0 .../providers/common/sql/hooks/sql.py | 8 +- .../providers/common/sql/hooks/sql.pyi | 17 +- .../common/sql/operators}/__init__.py | 0 .../providers/common/sql/operators/sql.py | 0 .../providers/common/sql/operators/sql.pyi | 18 +- 
.../providers/common/sql/provider.yaml | 0 .../providers/common/sql/sensors}/__init__.py | 0 .../providers/common/sql/sensors/sql.py | 0 .../providers/common/sql/sensors/sql.pyi | 5 +- .../databricks/.latest-doc-only-change.txt | 0 .../providers/databricks/CHANGELOG.rst | 0 .../airflow}/providers/databricks/__init__.py | 0 .../providers/databricks/hooks}/__init__.py | 0 .../providers/databricks/hooks/databricks.py | 0 .../databricks/hooks/databricks_base.py | 0 .../databricks/hooks/databricks_sql.py | 0 .../databricks/operators}/__init__.py | 0 .../databricks/operators/databricks.py | 0 .../databricks/operators/databricks_repos.py | 0 .../databricks/operators/databricks_sql.py | 0 .../operators/databricks_workflow.py | 0 .../providers/databricks/plugins}/__init__.py | 0 .../databricks/plugins/databricks_workflow.py | 0 .../providers/databricks/provider.yaml | 0 .../providers/databricks/sensors}/__init__.py | 0 .../sensors/databricks_partition.py | 0 .../databricks/sensors/databricks_sql.py | 0 .../databricks/triggers}/__init__.py | 0 .../databricks/triggers/databricks.py | 0 .../providers/databricks/utils}/__init__.py | 0 .../providers/databricks/utils/databricks.py | 0 .../datadog/.latest-doc-only-change.txt | 0 .../airflow}/providers/datadog/CHANGELOG.rst | 0 .../airflow}/providers/datadog/__init__.py | 0 .../providers/datadog/hooks}/__init__.py | 0 .../providers/datadog/hooks/datadog.py | 0 .../airflow}/providers/datadog/provider.yaml | 0 .../providers/datadog/sensors}/__init__.py | 0 .../providers/datadog/sensors/datadog.py | 0 .../dbt/cloud/.latest-doc-only-change.txt | 0 .../providers/dbt/cloud/CHANGELOG.rst | 0 .../airflow}/providers/dbt/cloud/__init__.py | 0 .../providers/dbt/cloud/hooks/__init__.py | 0 .../airflow}/providers/dbt/cloud/hooks/dbt.py | 5 +- .../providers/dbt/cloud/operators/__init__.py | 0 .../providers/dbt/cloud/operators/dbt.py | 0 .../providers/dbt/cloud/provider.yaml | 0 .../providers/dbt/cloud}/sensors/__init__.py | 0 .../providers/dbt/cloud/sensors/dbt.py | 0 .../providers/dbt/cloud/triggers/__init__.py | 0 .../providers/dbt/cloud/triggers/dbt.py | 0 .../providers/dbt/cloud/utils/__init__.py | 0 .../providers/dbt/cloud/utils/openlineage.py | 0 .../dingding/.latest-doc-only-change.txt | 0 .../airflow}/providers/dingding/CHANGELOG.rst | 0 .../airflow}/providers/dingding/__init__.py | 0 .../providers/dingding/hooks}/__init__.py | 0 .../providers/dingding/hooks/dingding.py | 0 .../providers/dingding/operators}/__init__.py | 0 .../providers/dingding/operators/dingding.py | 0 .../airflow}/providers/dingding/provider.yaml | 0 .../discord/.latest-doc-only-change.txt | 0 .../airflow}/providers/discord/CHANGELOG.rst | 0 .../airflow}/providers/discord/__init__.py | 0 .../providers/discord/hooks}/__init__.py | 0 .../discord/hooks/discord_webhook.py | 0 .../discord/notifications}/__init__.py | 0 .../discord/notifications/discord.py | 0 .../providers/discord/operators}/__init__.py | 0 .../discord/operators/discord_webhook.py | 0 .../airflow}/providers/discord/provider.yaml | 0 .../docker/.latest-doc-only-change.txt | 0 .../airflow}/providers/docker/CHANGELOG.rst | 0 .../src/airflow}/providers/docker/__init__.py | 0 .../providers/docker/decorators}/__init__.py | 0 .../providers/docker/decorators/docker.py | 0 .../airflow}/providers/docker/exceptions.py | 0 .../providers/docker/hooks}/__init__.py | 0 .../airflow}/providers/docker/hooks/docker.py | 0 .../providers/docker/operators}/__init__.py | 0 .../providers/docker/operators/docker.py | 0 .../docker/operators/docker_swarm.py 
 .../airflow}/providers/docker/provider.yaml | 0
 .../src/airflow}/providers/edge/CHANGELOG.rst | 0
 .../src/airflow}/providers/edge/__init__.py | 0
 .../providers/edge/api_endpoints/__init__.py | 0
 .../edge/api_endpoints/health_endpoint.py | 0
 .../edge/api_endpoints/rpc_api_endpoint.py | 0
 .../airflow}/providers/edge/cli/__init__.py | 0
 .../providers/edge/cli/edge_command.py | 0
 .../providers/edge/example_dags/__init__.py | 0
 .../edge/example_dags/integration_test.py | 0
 .../providers/edge/models/__init__.py | 0
 .../providers/edge/models/edge_job.py | 0
 .../providers/edge/models/edge_logs.py | 0
 .../providers/edge/models/edge_worker.py | 0
 .../providers/edge/openapi/__init__.py | 0
 .../edge/openapi/edge_worker_api_v1.yaml | 0
 .../providers/edge/plugins/__init__.py | 0
 .../edge/plugins/edge_executor_plugin.py | 0
 .../plugins/templates/edge_worker_hosts.html | 0
 .../plugins/templates/edge_worker_jobs.html | 0
 .../src/airflow}/providers/edge/provider.yaml | 0
 .../elasticsearch/.latest-doc-only-change.txt | 0
 .../providers/elasticsearch/CHANGELOG.rst | 0
 .../providers/elasticsearch/__init__.py | 0
 .../elasticsearch/hooks}/__init__.py | 0
 .../elasticsearch/hooks/elasticsearch.py | 0
 .../providers/elasticsearch/log/__init__.py | 0
 .../elasticsearch/log/es_json_formatter.py | 0
 .../elasticsearch/log/es_response.py | 0
 .../elasticsearch/log/es_task_handler.py | 0
 .../providers/elasticsearch/provider.yaml | 0
 .../exasol/.latest-doc-only-change.txt | 0
 .../airflow}/providers/exasol/CHANGELOG.rst | 0
 .../src/airflow}/providers/exasol/__init__.py | 0
 .../providers/exasol}/hooks/__init__.py | 0
 .../airflow}/providers/exasol/hooks/exasol.py | 0
 .../providers/exasol/operators}/__init__.py | 0
 .../providers/exasol/operators/exasol.py | 0
 .../airflow}/providers/exasol/provider.yaml | 0
 .../src/airflow}/providers/fab/CHANGELOG.rst | 0
 .../src/airflow}/providers/fab/__init__.py | 0
 .../src/airflow}/providers/fab/alembic.ini | 0
 .../providers/fab/auth_manager}/__init__.py | 0
 .../fab/auth_manager/api}/__init__.py | 0
 .../fab/auth_manager/api/auth}/__init__.py | 0
 .../api/auth/backend}/__init__.py | 0
 .../api/auth/backend/basic_auth.py | 0
 .../api/auth/backend/kerberos_auth.py | 0
 .../auth_manager/api_endpoints/__init__.py | 0
 .../role_and_permission_endpoint.py | 0
 .../api_endpoints/user_endpoint.py | 0
 .../auth_manager/cli_commands}/__init__.py | 0
 .../auth_manager/cli_commands/db_command.py | 0
 .../auth_manager/cli_commands/definition.py | 0
 .../auth_manager/cli_commands/role_command.py | 0
 .../cli_commands/sync_perm_command.py | 0
 .../auth_manager/cli_commands/user_command.py | 0
 .../fab/auth_manager/cli_commands/utils.py | 0
 .../fab/auth_manager/decorators}/__init__.py | 0
 .../fab/auth_manager/decorators/auth.py | 0
 .../fab/auth_manager/fab_auth_manager.py | 0
 .../fab/auth_manager/models/__init__.py | 0
 .../fab/auth_manager/models/anonymous_user.py | 0
 .../providers/fab/auth_manager/models/db.py | 9 +-
 .../fab/auth_manager/openapi/__init__.py | 0
 .../fab/auth_manager/openapi/v1.yaml | 0
 .../security_manager}/__init__.py | 0
 .../security_manager/constants.py | 0
 .../auth_manager/security_manager/override.py | 0
 .../fab/auth_manager/views/__init__.py | 0
 .../fab/auth_manager/views/permissions.py | 0
 .../fab/auth_manager/views/roles_list.py | 0
 .../providers/fab/auth_manager/views/user.py | 0
 .../fab/auth_manager/views/user_edit.py | 0
 .../fab/auth_manager/views/user_stats.py | 0
 .../airflow}/providers/fab/migrations/README | 0
 .../providers/fab/migrations/__init__.py | 0
 .../airflow}/providers/fab/migrations/env.py | 0
 .../providers/fab/migrations/script.py.mako | 0
 .../0001_1_4_0_placeholder_migration.py | 0
 .../fab/migrations/versions/__init__.py | 0
 .../src/airflow}/providers/fab/provider.yaml | 0
 .../facebook/.latest-doc-only-change.txt | 0
 .../airflow}/providers/facebook/CHANGELOG.rst | 0
 .../airflow}/providers/facebook/__init__.py | 0
 .../providers/facebook/ads/__init__.py | 0
 .../providers/facebook/ads/hooks/__init__.py | 0
 .../providers/facebook/ads/hooks/ads.py | 0
 .../airflow}/providers/facebook/provider.yaml | 0
 .../providers/ftp/.latest-doc-only-change.txt | 0
 .../src/airflow}/providers/ftp/CHANGELOG.rst | 0
 .../src/airflow}/providers/ftp/__init__.py | 0
 .../airflow/providers/ftp/hooks}/__init__.py | 0
 .../src/airflow}/providers/ftp/hooks/ftp.py | 0
 .../providers/ftp/operators/__init__.py | 0
 .../airflow}/providers/ftp/operators/ftp.py | 0
 .../src/airflow}/providers/ftp/provider.yaml | 0
 .../providers/ftp/sensors}/__init__.py | 0
 .../src/airflow}/providers/ftp/sensors/ftp.py | 0
 .../github/.latest-doc-only-change.txt | 0
 .../airflow}/providers/github/CHANGELOG.rst | 0
 .../src/airflow}/providers/github/__init__.py | 0
 .../providers/github/hooks/__init__.py | 0
 .../airflow}/providers/github/hooks/github.py | 0
 .../providers/github/operators/__init__.py | 0
 .../providers/github/operators/github.py | 0
 .../airflow}/providers/github/provider.yaml | 0
 .../providers/github/sensors/__init__.py | 0
 .../providers/github/sensors/github.py | 0
 .../google/.latest-doc-only-change.txt | 0
 .../airflow}/providers/google/CHANGELOG.rst | 0
 .../src/airflow}/providers/google/__init__.py | 0
 .../airflow}/providers/google/ads/.gitignore | 0
 .../airflow}/providers/google/ads/__init__.py | 0
 .../providers/google/ads/hooks/__init__.py | 0
 .../providers/google/ads/hooks/ads.py | 0
 .../google/ads/operators/__init__.py | 0
 .../providers/google/ads/operators/ads.py | 0
 .../google/ads/transfers/__init__.py | 0
 .../google/ads/transfers/ads_to_gcs.py | 0
 .../providers/google/cloud/__init__.py | 0
 .../google/cloud/_internal_client/__init__.py | 0
 .../_internal_client/secret_manager_client.py | 0
 .../google/cloud/example_dags/__init__.py | 0
 .../cloud/example_dags/example_cloud_task.py | 0
 .../example_facebook_ads_to_gcs.py | 0
 .../cloud/example_dags/example_looker.py | 0
 .../example_dags/example_presto_to_gcs.py | 0
 .../example_dags/example_salesforce_to_gcs.py | 0
 .../providers/google/cloud/fs/__init__.py | 0
 .../airflow}/providers/google/cloud/fs/gcs.py | 0
 .../providers/google/cloud/hooks/__init__.py | 0
 .../providers/google/cloud/hooks/automl.py | 0
 .../providers/google/cloud/hooks/bigquery.py | 0
 .../google/cloud/hooks/bigquery_dts.py | 0
 .../providers/google/cloud/hooks/bigtable.py | 0
 .../google/cloud/hooks/cloud_batch.py | 0
 .../google/cloud/hooks/cloud_build.py | 0
 .../google/cloud/hooks/cloud_composer.py | 0
 .../google/cloud/hooks/cloud_memorystore.py | 0
 .../providers/google/cloud/hooks/cloud_run.py | 0
 .../providers/google/cloud/hooks/cloud_sql.py | 0
 .../hooks/cloud_storage_transfer_service.py | 0
 .../providers/google/cloud/hooks/compute.py | 0
 .../google/cloud/hooks/compute_ssh.py | 0
 .../google/cloud/hooks/datacatalog.py | 0
 .../providers/google/cloud/hooks/dataflow.py | 0
 .../providers/google/cloud/hooks/dataform.py | 0
 .../google/cloud/hooks/datafusion.py | 0
 .../google/cloud/hooks/datapipeline.py | 0
 .../providers/google/cloud/hooks/dataplex.py | 0
 .../providers/google/cloud/hooks/dataprep.py | 0
 .../providers/google/cloud/hooks/dataproc.py | 0
 .../google/cloud/hooks/dataproc_metastore.py | 0
 .../providers/google/cloud/hooks/datastore.py | 0
 .../providers/google/cloud/hooks/dlp.py | 0
 .../providers/google/cloud/hooks/functions.py | 0
 .../providers/google/cloud/hooks/gcs.py | 0
 .../providers/google/cloud/hooks/gdm.py | 0
 .../providers/google/cloud/hooks/kms.py | 0
 .../google/cloud/hooks/kubernetes_engine.py | 0
 .../google/cloud/hooks/life_sciences.py | 0
 .../providers/google/cloud/hooks/looker.py | 0
 .../providers/google/cloud/hooks/mlengine.py | 0
 .../google/cloud/hooks/natural_language.py | 0
 .../providers/google/cloud/hooks/os_login.py | 0
 .../providers/google/cloud/hooks/pubsub.py | 0
 .../google/cloud/hooks/secret_manager.py | 0
 .../providers/google/cloud/hooks/spanner.py | 0
 .../google/cloud/hooks/speech_to_text.py | 0
 .../google/cloud/hooks/stackdriver.py | 0
 .../providers/google/cloud/hooks/tasks.py | 0
 .../google/cloud/hooks/text_to_speech.py | 0
 .../providers/google/cloud/hooks/translate.py | 0
 .../google/cloud/hooks/vertex_ai/__init__.py | 0
 .../google/cloud/hooks/vertex_ai/auto_ml.py | 0
 .../hooks/vertex_ai/batch_prediction_job.py | 0
 .../cloud/hooks/vertex_ai/custom_job.py | 0
 .../google/cloud/hooks/vertex_ai/dataset.py | 0
 .../cloud/hooks/vertex_ai/endpoint_service.py | 0
 .../cloud/hooks/vertex_ai/generative_model.py | 0
 .../vertex_ai/hyperparameter_tuning_job.py | 0
 .../cloud/hooks/vertex_ai/model_service.py | 0
 .../cloud/hooks/vertex_ai/pipeline_job.py | 0
 .../hooks/vertex_ai/prediction_service.py | 0
 .../google/cloud/hooks/video_intelligence.py | 0
 .../providers/google/cloud/hooks/vision.py | 0
 .../providers/google/cloud/hooks/workflows.py | 0
 .../providers/google/cloud/links/__init__.py | 0
 .../providers/google/cloud/links/automl.py | 0
 .../providers/google/cloud/links/base.py | 0
 .../providers/google/cloud/links/bigquery.py | 0
 .../google/cloud/links/bigquery_dts.py | 0
 .../providers/google/cloud/links/bigtable.py | 0
 .../google/cloud/links/cloud_build.py | 0
 .../google/cloud/links/cloud_functions.py | 0
 .../google/cloud/links/cloud_memorystore.py | 0
 .../providers/google/cloud/links/cloud_sql.py | 0
 .../cloud/links/cloud_storage_transfer.py | 0
 .../google/cloud/links/cloud_tasks.py | 0
 .../providers/google/cloud/links/compute.py | 0
 .../cloud/links/data_loss_prevention.py | 0
 .../google/cloud/links/datacatalog.py | 0
 .../providers/google/cloud/links/dataflow.py | 0
 .../providers/google/cloud/links/dataform.py | 0
 .../google/cloud/links/datafusion.py | 0
 .../providers/google/cloud/links/dataplex.py | 0
 .../providers/google/cloud/links/dataprep.py | 0
 .../providers/google/cloud/links/dataproc.py | 0
 .../providers/google/cloud/links/datastore.py | 0
 .../google/cloud/links/kubernetes_engine.py | 0
 .../google/cloud/links/life_sciences.py | 0
 .../providers/google/cloud/links/mlengine.py | 0
 .../providers/google/cloud/links/pubsub.py | 0
 .../providers/google/cloud/links/spanner.py | 0
 .../google/cloud/links/stackdriver.py | 0
 .../providers/google/cloud/links/translate.py | 0
 .../providers/google/cloud/links/vertex_ai.py | 0
 .../providers/google/cloud/links/workflows.py | 0
 .../providers/google/cloud/log/__init__.py | 0
 .../google/cloud/log/gcs_task_handler.py | 0
 .../cloud/log/stackdriver_task_handler.py | 0
 .../openlineage/BigQueryErrorRunFacet.json | 0
 .../openlineage/BigQueryJobRunFacet.json | 0
 .../google/cloud/openlineage/__init__.py | 0
 .../google/cloud/openlineage/mixins.py | 0
 .../google/cloud/openlineage/utils.py | 0
 .../google/cloud/operators/__init__.py | 0
 .../google/cloud/operators/automl.py | 0
 .../google/cloud/operators/bigquery.py | 2 +-
 .../google/cloud/operators/bigquery_dts.py | 0
 .../google/cloud/operators/bigtable.py | 0
 .../google/cloud/operators/cloud_base.py | 0
 .../google/cloud/operators/cloud_batch.py | 0
 .../google/cloud/operators/cloud_build.py | 0
 .../google/cloud/operators/cloud_composer.py | 0
 .../cloud/operators/cloud_memorystore.py | 0
 .../google/cloud/operators/cloud_run.py | 0
 .../google/cloud/operators/cloud_sql.py | 0
 .../cloud_storage_transfer_service.py | 0
 .../google/cloud/operators/compute.py | 0
 .../google/cloud/operators/datacatalog.py | 0
 .../google/cloud/operators/dataflow.py | 0
 .../google/cloud/operators/dataform.py | 0
 .../google/cloud/operators/datafusion.py | 0
 .../google/cloud/operators/datapipeline.py | 0
 .../google/cloud/operators/dataplex.py | 0
 .../google/cloud/operators/dataprep.py | 0
 .../google/cloud/operators/dataproc.py | 0
 .../cloud/operators/dataproc_metastore.py | 0
 .../google/cloud/operators/datastore.py | 0
 .../providers/google/cloud/operators/dlp.py | 0
 .../google/cloud/operators/functions.py | 0
 .../providers/google/cloud/operators/gcs.py | 0
 .../cloud/operators/kubernetes_engine.py | 0
 .../google/cloud/operators/life_sciences.py | 0
 .../google/cloud/operators/looker.py | 0
 .../google/cloud/operators/mlengine.py | 0
 .../cloud/operators/natural_language.py | 0
 .../google/cloud/operators/pubsub.py | 0
 .../google/cloud/operators/spanner.py | 0
 .../google/cloud/operators/speech_to_text.py | 0
 .../google/cloud/operators/stackdriver.py | 0
 .../providers/google/cloud/operators/tasks.py | 0
 .../google/cloud/operators/text_to_speech.py | 0
 .../google/cloud/operators/translate.py | 0
 .../cloud/operators/translate_speech.py | 0
 .../cloud/operators/vertex_ai/__init__.py | 0
 .../cloud/operators/vertex_ai/auto_ml.py | 0
 .../vertex_ai/batch_prediction_job.py | 0
 .../cloud/operators/vertex_ai/custom_job.py | 0
 .../cloud/operators/vertex_ai/dataset.py | 0
 .../operators/vertex_ai/endpoint_service.py | 0
 .../operators/vertex_ai/generative_model.py | 0
 .../vertex_ai/hyperparameter_tuning_job.py | 0
 .../operators/vertex_ai/model_service.py | 0
 .../cloud/operators/vertex_ai/pipeline_job.py | 0
 .../cloud/operators/video_intelligence.py | 0
 .../google/cloud/operators/vision.py | 0
 .../google/cloud/operators/workflows.py | 0
 .../google/cloud/secrets/__init__.py | 0
 .../google/cloud/secrets/secret_manager.py | 0
 .../google/cloud/sensors/__init__.py | 0
 .../google/cloud/sensors/bigquery.py | 0
 .../google/cloud/sensors/bigquery_dts.py | 0
 .../google/cloud/sensors/bigtable.py | 0
 .../google/cloud/sensors/cloud_composer.py | 0
 .../sensors/cloud_storage_transfer_service.py | 0
 .../google/cloud/sensors/dataflow.py | 0
 .../google/cloud/sensors/dataform.py | 0
 .../google/cloud/sensors/datafusion.py | 0
 .../google/cloud/sensors/dataplex.py | 0
 .../google/cloud/sensors/dataprep.py | 0
 .../google/cloud/sensors/dataproc.py | 0
 .../cloud/sensors/dataproc_metastore.py | 0
 .../providers/google/cloud/sensors/gcs.py | 0
 .../providers/google/cloud/sensors/looker.py | 0
 .../providers/google/cloud/sensors/pubsub.py | 0
 .../providers/google/cloud/sensors/tasks.py | 0
 .../google/cloud/sensors/workflows.py | 0
 .../google/cloud/transfers/__init__.py | 0
 .../google/cloud/transfers/adls_to_gcs.py | 0
 .../cloud/transfers/azure_blob_to_gcs.py | 0
 .../cloud/transfers/azure_fileshare_to_gcs.py | 0
 .../cloud/transfers/bigquery_to_bigquery.py | 0
 .../google/cloud/transfers/bigquery_to_gcs.py | 0
 .../cloud/transfers/bigquery_to_mssql.py | 0
 .../cloud/transfers/bigquery_to_mysql.py | 0
 .../cloud/transfers/bigquery_to_postgres.py | 0
 .../google/cloud/transfers/bigquery_to_sql.py | 0
 .../google/cloud/transfers/calendar_to_gcs.py | 0
 .../cloud/transfers/cassandra_to_gcs.py | 0
 .../cloud/transfers/facebook_ads_to_gcs.py | 0
 .../google/cloud/transfers/gcs_to_bigquery.py | 0
 .../google/cloud/transfers/gcs_to_gcs.py | 0
 .../google/cloud/transfers/gcs_to_local.py | 0
 .../google/cloud/transfers/gcs_to_sftp.py | 0
 .../google/cloud/transfers/gdrive_to_gcs.py | 0
 .../google/cloud/transfers/gdrive_to_local.py | 0
 .../google/cloud/transfers/local_to_gcs.py | 0
 .../google/cloud/transfers/mssql_to_gcs.py | 0
 .../google/cloud/transfers/mysql_to_gcs.py | 0
 .../google/cloud/transfers/oracle_to_gcs.py | 0
 .../google/cloud/transfers/postgres_to_gcs.py | 0
 .../google/cloud/transfers/presto_to_gcs.py | 0
 .../google/cloud/transfers/s3_to_gcs.py | 0
 .../cloud/transfers/salesforce_to_gcs.py | 0
 .../google/cloud/transfers/sftp_to_gcs.py | 0
 .../google/cloud/transfers/sheets_to_gcs.py | 0
 .../google/cloud/transfers/sql_to_gcs.py | 0
 .../google/cloud/transfers/trino_to_gcs.py | 0
 .../google/cloud/triggers/__init__.py | 0
 .../google/cloud/triggers/bigquery.py | 0
 .../google/cloud/triggers/bigquery_dts.py | 0
 .../google/cloud/triggers/cloud_batch.py | 0
 .../google/cloud/triggers/cloud_build.py | 0
 .../google/cloud/triggers/cloud_composer.py | 0
 .../google/cloud/triggers/cloud_run.py | 0
 .../google/cloud/triggers/cloud_sql.py | 0
 .../cloud_storage_transfer_service.py | 0
 .../google/cloud/triggers/dataflow.py | 0
 .../google/cloud/triggers/datafusion.py | 0
 .../google/cloud/triggers/dataplex.py | 0
 .../google/cloud/triggers/dataproc.py | 0
 .../providers/google/cloud/triggers/gcs.py | 0
 .../cloud/triggers/kubernetes_engine.py | 0
 .../google/cloud/triggers/mlengine.py | 0
 .../providers/google/cloud/triggers/pubsub.py | 0
 .../google/cloud/triggers/vertex_ai.py | 0
 .../providers/google/cloud/utils}/__init__.py | 0
 .../providers/google/cloud/utils/bigquery.py | 0
 .../google/cloud/utils/bigquery_get_data.py | 0
 .../cloud/utils/credentials_provider.py | 0
 .../providers/google/cloud/utils/dataform.py | 0
 .../google/cloud/utils/datafusion.py | 0
 .../providers/google/cloud/utils/dataproc.py | 0
 .../cloud/utils/external_token_supplier.py | 0
 .../google/cloud/utils/field_sanitizer.py | 0
 .../google/cloud/utils/field_validator.py | 0
 .../providers/google/cloud/utils/helpers.py | 0
 .../cloud/utils/mlengine_operator_utils.py | 0
 .../utils/mlengine_prediction_summary.py | 0
 .../providers/google/common/__init__.py | 0
 .../google/common/auth_backend/__init__.py | 0
 .../common/auth_backend/google_openid.py | 0
 .../providers/google/common/consts.py | 0
 .../providers/google/common/deprecated.py | 0
 .../providers/google/common/hooks/__init__.py | 0
 .../google/common/hooks/base_google.py | 0
 .../google/common/hooks/discovery_api.py | 0
 .../providers/google/common/links/__init__.py | 0
 .../providers/google/common/links/storage.py | 0
 .../providers/google/common/utils/__init__.py | 0
 .../common/utils/id_token_credentials.py | 0
 .../providers/google/datasets/__init__.py | 0
 .../providers/google/datasets/bigquery.py | 0
 .../providers/google/firebase/__init__.py | 0
 .../google/firebase/hooks/__init__.py | 0
 .../google/firebase/hooks/firestore.py | 0
 .../google/firebase/operators/__init__.py | 0
 .../google/firebase/operators/firestore.py | 0
 .../providers/google/go_module_utils.py | 0
 .../providers/google/leveldb/__init__.py | 0
 .../google/leveldb/hooks/__init__.py | 0
 .../providers/google/leveldb/hooks/leveldb.py | 0
 .../google/leveldb/operators/__init__.py | 0
 .../google/leveldb/operators/leveldb.py | 0
 .../google/marketing_platform/__init__.py | 0
 .../example_dags/__init__.py | 0
 .../example_dags/example_display_video.py | 0
 .../marketing_platform/hooks/__init__.py | 0
 .../marketing_platform/hooks/analytics.py | 0
 .../hooks/analytics_admin.py | 0
 .../hooks/campaign_manager.py | 0
 .../marketing_platform/hooks/display_video.py | 0
 .../marketing_platform/hooks/search_ads.py | 0
 .../marketing_platform/links/__init__.py | 0
 .../links/analytics_admin.py | 0
 .../marketing_platform/operators/__init__.py | 0
 .../marketing_platform/operators/analytics.py | 0
 .../operators/analytics_admin.py | 0
 .../operators/campaign_manager.py | 0
 .../operators/display_video.py | 0
 .../operators/search_ads.py | 0
 .../marketing_platform/sensors/__init__.py | 0
 .../sensors/campaign_manager.py | 0
 .../sensors/display_video.py | 0
 .../airflow}/providers/google/provider.yaml | 0
 .../providers/google/suite}/__init__.py | 0
 .../providers/google/suite/hooks}/__init__.py | 0
 .../providers/google/suite/hooks/calendar.py | 0
 .../providers/google/suite/hooks/drive.py | 0
 .../providers/google/suite/hooks/sheets.py | 0
 .../google/suite/operators}/__init__.py | 0
 .../google/suite/operators/sheets.py | 0
 .../google/suite/sensors/__init__.py | 0
 .../providers/google/suite/sensors/drive.py | 0
 .../google/suite/transfers/__init__.py | 0
 .../google/suite/transfers/gcs_to_gdrive.py | 0
 .../google/suite/transfers/gcs_to_sheets.py | 0
 .../google/suite/transfers/local_to_drive.py | 0
 .../google/suite/transfers/sql_to_sheets.py | 0
 .../grpc/.latest-doc-only-change.txt | 0
 .../src/airflow}/providers/grpc/CHANGELOG.rst | 0
 .../src/airflow}/providers/grpc/__init__.py | 0
 .../airflow/providers/grpc/hooks}/__init__.py | 0
 .../src/airflow}/providers/grpc/hooks/grpc.py | 0
 .../providers/grpc/operators}/__init__.py | 0
 .../airflow}/providers/grpc/operators/grpc.py | 0
 .../src/airflow}/providers/grpc/provider.yaml | 0
 .../hashicorp/.latest-doc-only-change.txt | 0
 .../providers/hashicorp/CHANGELOG.rst | 0
 .../airflow}/providers/hashicorp/__init__.py | 0
 .../hashicorp/_internal_client/__init__.py | 0
 .../_internal_client/vault_client.py | 0
 .../providers/hashicorp/hooks/__init__.py | 0
 .../providers/hashicorp/hooks/vault.py | 0
 .../providers/hashicorp/provider.yaml | 0
 .../providers/hashicorp/secrets/__init__.py | 0
 .../providers/hashicorp/secrets/vault.py | 0
 .../http/.latest-doc-only-change.txt | 0
 .../src/airflow}/providers/http/CHANGELOG.rst | 0
 .../src/airflow}/providers/http/__init__.py | 0
 .../airflow/providers/http/hooks}/__init__.py | 0
 .../src/airflow}/providers/http/hooks/http.py | 0
 .../providers/http/operators}/__init__.py | 0
 .../airflow}/providers/http/operators/http.py | 0
 .../src/airflow}/providers/http/provider.yaml | 0
 .../providers/http/sensors}/__init__.py | 0
 .../airflow}/providers/http/sensors/http.py | 0
 .../providers/http/triggers}/__init__.py | 0
 .../airflow}/providers/http/triggers/http.py | 0
 .../imap/.latest-doc-only-change.txt | 0
 .../src/airflow}/providers/imap/CHANGELOG.rst | 0
 .../src/airflow}/providers/imap/__init__.py | 0
 .../airflow/providers/imap/hooks}/__init__.py | 0
 .../src/airflow}/providers/imap/hooks/imap.py | 0
 .../src/airflow}/providers/imap/provider.yaml | 0
 .../providers/imap/sensors}/__init__.py | 0
 .../providers/imap/sensors/imap_attachment.py | 0
 .../influxdb/.latest-doc-only-change.txt | 0
 .../airflow}/providers/influxdb/CHANGELOG.rst | 0
 .../airflow}/providers/influxdb/__init__.py | 0
 .../providers/influxdb/hooks/__init__.py | 0
 .../providers/influxdb/hooks/influxdb.py | 0
 .../providers/influxdb/operators/__init__.py | 0
 .../providers/influxdb/operators/influxdb.py | 0
 .../airflow}/providers/influxdb/provider.yaml | 0
 .../jdbc/.latest-doc-only-change.txt | 0
 .../src/airflow}/providers/jdbc/CHANGELOG.rst | 0
 .../src/airflow}/providers/jdbc/__init__.py | 0
 .../airflow/providers/jdbc/hooks}/__init__.py | 0
 .../src/airflow}/providers/jdbc/hooks/jdbc.py | 0
 .../providers/jdbc/operators}/__init__.py | 0
 .../airflow}/providers/jdbc/operators/jdbc.py | 0
 .../src/airflow}/providers/jdbc/provider.yaml | 0
 .../jenkins/.latest-doc-only-change.txt | 0
 .../airflow}/providers/jenkins/CHANGELOG.rst | 0
 .../airflow}/providers/jenkins/__init__.py | 0
 .../providers/jenkins/hooks}/__init__.py | 0
 .../providers/jenkins/hooks/jenkins.py | 0
 .../providers/jenkins/operators}/__init__.py | 0
 .../jenkins/operators/jenkins_job_trigger.py | 0
 .../airflow}/providers/jenkins/provider.yaml | 0
 .../providers/jenkins/sensors}/__init__.py | 0
 .../providers/jenkins/sensors/jenkins.py | 0
 .../azure/.latest-doc-only-change.txt | 0
 .../providers/microsoft/azure/CHANGELOG.rst | 0
 .../providers/microsoft/azure/__init__.py | 0
 .../providers/microsoft/azure/fs/__init__.py | 0
 .../providers/microsoft/azure/fs/adls.py | 0
 .../microsoft/azure/hooks}/__init__.py | 0
 .../providers/microsoft/azure/hooks/adx.py | 0
 .../providers/microsoft/azure/hooks/asb.py | 26 +-
 .../microsoft/azure/hooks/base_azure.py | 0
 .../providers/microsoft/azure/hooks/batch.py | 0
 .../azure/hooks/container_instance.py | 0
 .../azure/hooks/container_registry.py | 0
 .../microsoft/azure/hooks/container_volume.py | 0
 .../providers/microsoft/azure/hooks/cosmos.py | 0
 .../microsoft/azure/hooks/data_factory.py | 0
 .../microsoft/azure/hooks/data_lake.py | 0
 .../microsoft/azure/hooks/fileshare.py | 0
 .../microsoft/azure/hooks/msgraph.py | 0
 .../microsoft/azure/hooks/powerbi.py | 0
 .../microsoft/azure/hooks/synapse.py | 0
 .../providers/microsoft/azure/hooks/wasb.py | 0
 .../providers/microsoft/azure/log/__init__.py | 0
 .../microsoft/azure/log/wasb_task_handler.py | 0
 .../microsoft/azure/operators}/__init__.py | 0
 .../microsoft/azure/operators/adls.py | 0
 .../microsoft/azure/operators/adx.py | 0
 .../microsoft/azure/operators/asb.py | 0
 .../microsoft/azure/operators/batch.py | 0
 .../azure/operators/container_instances.py | 0
 .../microsoft/azure/operators/cosmos.py | 0
 .../microsoft/azure/operators/data_factory.py | 0
 .../microsoft/azure/operators/msgraph.py | 0
 .../microsoft/azure/operators/powerbi.py | 0
 .../microsoft/azure/operators/synapse.py | 0
 .../azure/operators/wasb_delete_blob.py | 0
 .../providers/microsoft/azure/provider.yaml | 0
 .../microsoft/azure/secrets/__init__.py | 0
 .../microsoft/azure/secrets/key_vault.py | 0
 .../microsoft/azure/sensors}/__init__.py | 0
 .../microsoft/azure/sensors/cosmos.py | 0
 .../microsoft/azure/sensors/data_factory.py | 0
 .../microsoft/azure/sensors/msgraph.py | 0
 .../providers/microsoft/azure/sensors/wasb.py | 0
 .../microsoft/azure/transfers/__init__.py | 0
 .../azure/transfers/azure_blob_to_gcs.py | 0
 .../azure/transfers/local_to_adls.py | 0
 .../azure/transfers/local_to_wasb.py | 0
 .../transfers/oracle_to_azure_data_lake.py | 0
 .../microsoft/azure/transfers/s3_to_wasb.py | 0
 .../microsoft/azure/transfers/sftp_to_wasb.py | 0
 .../microsoft/azure/triggers/__init__.py | 0
 .../microsoft/azure/triggers/data_factory.py | 0
 .../microsoft/azure/triggers/msgraph.py | 0
 .../microsoft/azure/triggers/powerbi.py | 0
 .../microsoft/azure/triggers/wasb.py | 0
 .../providers/microsoft/azure/utils.py | 0
 .../mssql/.latest-doc-only-change.txt | 0
 .../providers/microsoft/mssql/CHANGELOG.rst | 0
 .../providers/microsoft/mssql/__init__.py | 0
 .../microsoft/mssql/hooks}/__init__.py | 0
 .../providers/microsoft/mssql/hooks/mssql.py | 0
 .../microsoft/mssql/operators}/__init__.py | 0
 .../microsoft/mssql/operators/mssql.py | 0
 .../providers/microsoft/mssql/provider.yaml | 0
 .../psrp/.latest-doc-only-change.txt | 0
 .../providers/microsoft/psrp/CHANGELOG.rst | 0
 .../providers/microsoft/psrp/__init__.py | 0
 .../microsoft/psrp}/hooks/__init__.py | 0
 .../providers/microsoft/psrp/hooks/psrp.py | 0
 .../microsoft/psrp}/operators/__init__.py | 0
 .../microsoft/psrp/operators/psrp.py | 20 +-
 .../providers/microsoft/psrp/provider.yaml | 0
 .../winrm/.latest-doc-only-change.txt | 0
 .../providers/microsoft/winrm/CHANGELOG.rst | 0
 .../providers/microsoft/winrm/__init__.py | 0
 .../microsoft/winrm}/hooks/__init__.py | 0
 .../providers/microsoft/winrm/hooks/winrm.py | 0
 .../microsoft/winrm}/operators/__init__.py | 0
 .../microsoft/winrm/operators/winrm.py | 0
 .../providers/microsoft/winrm/provider.yaml | 0
 .../mongo/.latest-doc-only-change.txt | 0
 .../airflow}/providers/mongo/CHANGELOG.rst | 0
 .../src/airflow}/providers/mongo/__init__.py | 0
 .../providers/mongo}/hooks/__init__.py | 0
 .../airflow}/providers/mongo/hooks/mongo.py | 0
 .../airflow}/providers/mongo/provider.yaml | 0
 .../providers/mongo/sensors}/__init__.py | 0
 .../airflow}/providers/mongo/sensors/mongo.py | 0
 .../mysql/.latest-doc-only-change.txt | 0
 .../airflow}/providers/mysql/CHANGELOG.rst | 0
 .../src/airflow}/providers/mysql/__init__.py | 0
 .../providers/mysql/assets/__init__.py | 0
 .../airflow}/providers/mysql/assets/mysql.py | 0
 .../providers/mysql}/hooks/__init__.py | 0
 .../airflow}/providers/mysql/hooks/mysql.py | 0
 .../providers/mysql/operators}/__init__.py | 0
 .../providers/mysql/operators/mysql.py | 0
 .../airflow}/providers/mysql/provider.yaml | 0
 .../providers/mysql/transfers/__init__.py | 0
 .../mysql/transfers/presto_to_mysql.py | 0
 .../providers/mysql/transfers/s3_to_mysql.py | 0
 .../mysql/transfers/trino_to_mysql.py | 0
 .../mysql/transfers/vertica_to_mysql.py | 0
 .../neo4j/.latest-doc-only-change.txt | 0
 .../airflow}/providers/neo4j/CHANGELOG.rst | 0
 .../src/airflow}/providers/neo4j/README.md | 0
 .../src/airflow}/providers/neo4j/__init__.py | 0
 .../providers/neo4j}/hooks/__init__.py | 0
 .../airflow}/providers/neo4j/hooks/neo4j.py | 0
 .../providers/neo4j}/operators/__init__.py | 0
 .../providers/neo4j/operators/neo4j.py | 0
 .../airflow}/providers/neo4j/provider.yaml | 0
 .../odbc/.latest-doc-only-change.txt | 0
 .../src/airflow}/providers/odbc/CHANGELOG.rst | 0
 .../src/airflow}/providers/odbc/__init__.py | 0
 .../airflow}/providers/odbc/hooks/__init__.py | 0
 .../src/airflow}/providers/odbc/hooks/odbc.py | 0
 .../src/airflow}/providers/odbc/provider.yaml | 0
 .../openai/.latest-doc-only-change.txt | 0
 .../airflow}/providers/openai/CHANGELOG.rst | 0
 .../src/airflow}/providers/openai/__init__.py | 0
 .../airflow}/providers/openai/exceptions.py | 0
 .../providers/openai/hooks/__init__.py | 0
 .../airflow}/providers/openai/hooks/openai.py | 0
 .../providers/openai/operators/__init__.py | 0
 .../providers/openai/operators/openai.py | 0
 .../airflow}/providers/openai/provider.yaml | 0
 .../providers/openai/triggers/__init__.py | 0
 .../providers/openai/triggers/openai.py | 0
 .../openfaas/.latest-doc-only-change.txt | 0
 .../airflow}/providers/openfaas/CHANGELOG.rst | 0
 .../airflow}/providers/openfaas/__init__.py | 0
 .../providers/openfaas}/hooks/__init__.py | 0
 .../providers/openfaas/hooks/openfaas.py | 0
 .../airflow}/providers/openfaas/provider.yaml | 0
 .../openlineage/.latest-doc-only-change.txt | 0
 .../providers/openlineage/CHANGELOG.rst | 0
 .../providers/openlineage/__init__.py | 0
 .../airflow}/providers/openlineage/conf.py | 0
 .../openlineage/extractors/__init__.py | 0
 .../providers/openlineage/extractors/base.py | 0
 .../providers/openlineage/extractors/bash.py | 0
 .../openlineage/extractors/manager.py | 0
 .../openlineage/extractors/python.py | 0
 .../facets/AirflowDagRunFacet.json | 0
 .../facets/AirflowDebugRunFacet.json | 0
 .../openlineage/facets/AirflowJobFacet.json | 0
 .../openlineage/facets/AirflowRunFacet.json | 0
 .../facets/AirflowStateRunFacet.json | 0
 .../providers/openlineage/facets/__init__.py | 0
 .../providers/openlineage/plugins/__init__.py | 0
 .../providers/openlineage/plugins/adapter.py | 0
 .../providers/openlineage/plugins/facets.py | 0
 .../providers/openlineage/plugins/listener.py | 0
 .../providers/openlineage/plugins/macros.py | 0
 .../openlineage/plugins/openlineage.py | 0
 .../providers/openlineage/provider.yaml | 0
 .../providers/openlineage/sqlparser.py | 0
 .../providers/openlineage/utils/__init__.py | 0
 .../utils/asset_compat_lineage_collector.py | 0
 .../openlineage/utils/selective_enable.py | 0
 .../providers/openlineage/utils/sql.py | 0
 .../providers/openlineage/utils/utils.py | 0
 .../opensearch/.latest-doc-only-change.txt | 0
 .../providers/opensearch/CHANGELOG.rst | 0
 .../airflow}/providers/opensearch/__init__.py | 0
 .../providers/opensearch/hooks/__init__.py | 0
 .../providers/opensearch/hooks/opensearch.py | 0
 .../providers/opensearch/log/__init__.py | 0
 .../opensearch/log/os_json_formatter.py | 0
 .../providers/opensearch/log/os_response.py | 0
 .../opensearch/log/os_task_handler.py | 0
 .../opensearch/operators/__init__.py | 0
 .../opensearch/operators/opensearch.py | 0
 .../providers/opensearch/provider.yaml | 0
 .../opsgenie/.latest-doc-only-change.txt | 0
 .../airflow}/providers/opsgenie/CHANGELOG.rst | 0
 .../airflow}/providers/opsgenie/__init__.py | 0
 .../providers/opsgenie/hooks}/__init__.py | 0
 .../providers/opsgenie/hooks/opsgenie.py | 0
 .../opsgenie/notifications/__init__.py | 0
 .../opsgenie/notifications/opsgenie.py | 0
 .../providers/opsgenie/operators}/__init__.py | 0
 .../providers/opsgenie/operators/opsgenie.py | 0
 .../airflow}/providers/opsgenie/provider.yaml | 0
 .../providers/opsgenie/typing/__init__.py | 0
 .../providers/opsgenie/typing/opsgenie.py | 0
 .../oracle/.latest-doc-only-change.txt | 0
 .../airflow}/providers/oracle/CHANGELOG.rst | 0
 .../src/airflow}/providers/oracle/__init__.py | 0
 .../providers/oracle/example_dags/__init__.py | 0
 .../oracle/example_dags/example_oracle.py | 0
 .../providers/oracle}/hooks/__init__.py | 0
 .../airflow}/providers/oracle/hooks/oracle.py | 0
 .../providers/oracle}/operators/__init__.py | 0
 .../providers/oracle/operators/oracle.py | 0
 .../airflow}/providers/oracle/provider.yaml | 0
 .../providers/oracle/transfers/__init__.py | 0
 .../oracle/transfers/oracle_to_oracle.py | 0
 .../pagerduty/.latest-doc-only-change.txt | 0
 .../providers/pagerduty/CHANGELOG.rst | 0
 .../airflow}/providers/pagerduty/__init__.py | 0
 .../providers/pagerduty}/hooks/__init__.py | 0
 .../providers/pagerduty/hooks/pagerduty.py | 0
 .../pagerduty/hooks/pagerduty_events.py | 0
 .../pagerduty/notifications/__init__.py | 0
 .../pagerduty/notifications/pagerduty.py | 0
 .../providers/pagerduty/provider.yaml | 0
 .../papermill/.latest-doc-only-change.txt | 0
 .../providers/papermill/CHANGELOG.rst | 0
 .../airflow}/providers/papermill/__init__.py | 0
 .../providers/papermill/hooks}/__init__.py | 0
 .../providers/papermill/hooks/kernel.py | 0
 .../papermill/operators}/__init__.py | 0
 .../papermill/operators/papermill.py | 0
 .../providers/papermill/provider.yaml | 0
 .../pgvector/.latest-doc-only-change.txt | 0
 .../airflow}/providers/pgvector/CHANGELOG.rst | 0
 .../airflow}/providers/pgvector/__init__.py | 0
 .../providers/pgvector/hooks/__init__.py | 0
 .../providers/pgvector/hooks/pgvector.py | 0
 .../providers/pgvector/operators/__init__.py | 0
 .../providers/pgvector/operators/pgvector.py | 0
 .../airflow}/providers/pgvector/provider.yaml | 0
 .../pinecone/.latest-doc-only-change.txt | 0
 .../airflow}/providers/pinecone/CHANGELOG.rst | 0
 .../airflow}/providers/pinecone/__init__.py | 0
 .../providers/pinecone/hooks/__init__.py | 0
 .../providers/pinecone/hooks/pinecone.py | 0
 .../providers/pinecone/operators/__init__.py | 0
 .../providers/pinecone/operators/pinecone.py | 0
 .../airflow}/providers/pinecone/provider.yaml | 0
 .../postgres/.latest-doc-only-change.txt | 0
 .../airflow}/providers/postgres/CHANGELOG.rst | 0
 .../airflow}/providers/postgres/__init__.py | 0
 .../providers/postgres/assets/__init__.py | 0
 .../providers/postgres/assets/postgres.py | 0
 .../providers/postgres}/hooks/__init__.py | 0
 .../providers/postgres/hooks/postgres.py | 0
 .../providers/postgres}/operators/__init__.py | 0
 .../providers/postgres/operators/postgres.py | 0
 .../airflow}/providers/postgres/provider.yaml | 0
 .../presto/.latest-doc-only-change.txt | 0
 .../airflow}/providers/presto/CHANGELOG.rst | 0
 .../src/airflow}/providers/presto/__init__.py | 0
 .../providers/presto}/hooks/__init__.py | 0
 .../airflow}/providers/presto/hooks/presto.py | 0
 .../airflow}/providers/presto/provider.yaml | 0
 .../providers/presto/transfers/__init__.py | 0
 .../presto/transfers/gcs_to_presto.py | 0
 .../qdrant/.latest-doc-only-change.txt | 0
 .../airflow}/providers/qdrant/CHANGELOG.rst | 0
 .../src/airflow}/providers/qdrant/__init__.py | 0
 .../providers/qdrant/hooks/__init__.py | 0
 .../airflow}/providers/qdrant/hooks/qdrant.py | 0
 .../providers/qdrant/operators/__init__.py | 0
 .../providers/qdrant/operators/qdrant.py | 0
 .../airflow}/providers/qdrant/provider.yaml | 0
 .../redis/.latest-doc-only-change.txt | 0
 .../airflow}/providers/redis/CHANGELOG.rst | 0
 .../src/airflow}/providers/redis/__init__.py | 0
 .../providers/redis/hooks}/__init__.py | 0
 .../airflow}/providers/redis/hooks/redis.py | 0
 .../airflow/providers/redis/log}/__init__.py | 0
 .../providers/redis/log/redis_task_handler.py | 0
 .../providers/redis/operators}/__init__.py | 0
 .../redis/operators/redis_publish.py | 0
 .../airflow}/providers/redis/provider.yaml | 0
 .../providers/redis/sensors}/__init__.py | 0
 .../providers/redis/sensors/redis_key.py | 0
 .../providers/redis/sensors/redis_pub_sub.py | 0
 .../salesforce/.latest-doc-only-change.txt | 0
 .../providers/salesforce/CHANGELOG.rst | 0
 .../airflow}/providers/salesforce/__init__.py | 0
 .../providers/salesforce/hooks}/__init__.py | 0
 .../providers/salesforce/hooks/salesforce.py | 0
 .../salesforce/operators/__init__.py | 0
 .../providers/salesforce/operators/bulk.py | 0
 .../operators/salesforce_apex_rest.py | 0
 .../providers/salesforce/provider.yaml | 0
 .../samba/.latest-doc-only-change.txt | 0
 .../airflow}/providers/samba/CHANGELOG.rst | 0
 .../src/airflow}/providers/samba/__init__.py | 0
 .../providers/samba/hooks}/__init__.py | 0
 .../airflow}/providers/samba/hooks/samba.py | 0
 .../airflow}/providers/samba/provider.yaml | 0
 .../providers/samba/transfers/__init__.py | 0
 .../providers/samba/transfers/gcs_to_samba.py | 0
 .../segment/.latest-doc-only-change.txt | 0
 .../airflow}/providers/segment/CHANGELOG.rst | 0
 .../airflow}/providers/segment/__init__.py | 0
 .../providers/segment}/hooks/__init__.py | 0
 .../providers/segment/hooks/segment.py | 0
 .../providers/segment/operators}/__init__.py | 0
 .../segment/operators/segment_track_event.py | 0
 .../airflow}/providers/segment/provider.yaml | 0
 .../sendgrid/.latest-doc-only-change.txt | 0
 .../airflow}/providers/sendgrid/CHANGELOG.rst | 0
 .../airflow}/providers/sendgrid/__init__.py | 0
 .../airflow}/providers/sendgrid/provider.yaml | 0
 .../providers/sendgrid/utils/__init__.py | 0
 .../providers/sendgrid/utils/emailer.py | 0
 .../sftp/.latest-doc-only-change.txt | 0
 .../src/airflow}/providers/sftp/CHANGELOG.rst | 0
 .../src/airflow}/providers/sftp/__init__.py | 0
 .../providers/sftp/decorators/__init__.py | 0
 .../sftp/decorators/sensors/__init__.py | 0
 .../providers/sftp/decorators/sensors/sftp.py | 0
 .../airflow}/providers/sftp/hooks/__init__.py | 0
 .../src/airflow}/providers/sftp/hooks/sftp.py | 0
 .../providers/sftp/operators/__init__.py | 0
 .../airflow}/providers/sftp/operators/sftp.py | 0
 .../src/airflow}/providers/sftp/provider.yaml | 0
 .../providers/sftp/sensors/__init__.py | 0
 .../airflow}/providers/sftp/sensors/sftp.py | 0
 .../providers/sftp/triggers/__init__.py | 0
 .../airflow}/providers/sftp/triggers/sftp.py | 0
 .../singularity/.latest-doc-only-change.txt | 0
 .../providers/singularity/CHANGELOG.rst | 0
 .../providers/singularity/__init__.py | 0
 .../singularity/operators}/__init__.py | 0
 .../singularity/operators/singularity.py | 0
 .../providers/singularity/provider.yaml | 0
 .../slack/.latest-doc-only-change.txt | 0
 .../airflow}/providers/slack/CHANGELOG.rst | 0
 .../src/airflow}/providers/slack/__init__.py | 0
 .../providers/slack/hooks}/__init__.py | 0
 .../airflow}/providers/slack/hooks/slack.py | 0
 .../providers/slack/hooks/slack_webhook.py | 0
 .../providers/slack/notifications/__init__.py | 0
 .../providers/slack/notifications/slack.py | 0
 .../slack/notifications/slack_notifier.py | 0
 .../slack/notifications/slack_webhook.py | 0
 .../providers/slack}/operators/__init__.py | 0
 .../providers/slack/operators/slack.py | 0
 .../slack/operators/slack_webhook.py | 0
 .../airflow}/providers/slack/provider.yaml | 0
 .../providers/slack/transfers/__init__.py | 0
 .../slack/transfers/base_sql_to_slack.py | 0
 .../providers/slack/transfers/sql_to_slack.py | 0
 .../slack/transfers/sql_to_slack_webhook.py | 0
 .../providers/slack/utils/__init__.py | 0
 .../smtp/.latest-doc-only-change.txt | 0
 .../src/airflow}/providers/smtp/CHANGELOG.rst | 0
 .../src/airflow}/providers/smtp/__init__.py | 0
 .../airflow/providers/smtp}/hooks/__init__.py | 0
 .../src/airflow}/providers/smtp/hooks/smtp.py | 0
 .../providers/smtp/notifications/__init__.py | 0
 .../providers/smtp/notifications/smtp.py | 0
 .../smtp/notifications/templates/__init__.py | 0
 .../smtp/notifications/templates/email.html | 0
 .../templates/email_subject.jinja2 | 0
 .../providers/smtp}/operators/__init__.py | 0
 .../airflow}/providers/smtp/operators/smtp.py | 0
 .../src/airflow}/providers/smtp/provider.yaml | 0
 .../snowflake/.latest-doc-only-change.txt | 0
 .../providers/snowflake/CHANGELOG.rst | 0
 .../airflow}/providers/snowflake/__init__.py | 0
 .../snowflake/decorators/__init__.py | 0
 .../snowflake/decorators/snowpark.py | 0
 .../providers/snowflake/hooks/__init__.py | 0
 .../providers/snowflake/hooks/snowflake.py | 0
 .../snowflake/hooks/snowflake_sql_api.py | 7 +-
 .../providers/snowflake/operators/__init__.py | 0
 .../snowflake/operators/snowflake.py | 0
 .../providers/snowflake/operators/snowpark.py | 0
 .../providers/snowflake/provider.yaml | 0
 .../providers/snowflake/transfers/__init__.py | 0
 .../transfers/copy_into_snowflake.py | 0
 .../providers/snowflake/triggers/__init__.py | 0
 .../snowflake/triggers/snowflake_trigger.py | 0
 .../providers/snowflake/utils/__init__.py | 0
 .../providers/snowflake/utils/common.py | 0
 .../providers/snowflake/utils/openlineage.py | 0
 .../providers/snowflake/utils/snowpark.py | 0
 .../snowflake/utils/sql_api_generate_jwt.py | 0
 .../sqlite/.latest-doc-only-change.txt | 0
 .../airflow}/providers/sqlite/CHANGELOG.rst | 0
 .../src/airflow}/providers/sqlite/__init__.py | 0
 .../providers/sqlite}/hooks/__init__.py | 0
 .../airflow}/providers/sqlite/hooks/sqlite.py | 0
 .../providers/sqlite}/operators/__init__.py | 0
 .../providers/sqlite/operators/sqlite.py | 0
 .../airflow}/providers/sqlite/provider.yaml | 0
 .../providers/ssh/.latest-doc-only-change.txt | 0
 .../src/airflow}/providers/ssh/CHANGELOG.rst | 0
 .../src/airflow}/providers/ssh/__init__.py | 0
 .../airflow/providers/ssh}/hooks/__init__.py | 0
 .../src/airflow}/providers/ssh/hooks/ssh.py | 0
 .../providers/ssh}/operators/__init__.py | 0
 .../airflow}/providers/ssh/operators/ssh.py | 0
 .../src/airflow}/providers/ssh/provider.yaml | 0
 .../airflow}/providers/standard/CHANGELOG.rst | 0
 .../airflow/providers/standard}/__init__.py | 0
 .../providers/standard/operators/__init__.py | 0
 .../providers/standard/operators/bash.py | 0
 .../providers/standard/operators/datetime.py | 0
 .../providers/standard/operators/weekday.py | 0
 .../airflow}/providers/standard/provider.yaml | 0
 .../providers/standard/sensors/__init__.py | 0
 .../providers/standard/sensors/bash.py | 0
 .../providers/standard/sensors/date_time.py | 0
 .../providers/standard/sensors/time.py | 0
 .../providers/standard/sensors/time_delta.py | 0
 .../providers/standard/sensors/weekday.py | 0
 .../tableau/.latest-doc-only-change.txt | 0
 .../airflow}/providers/tableau/CHANGELOG.rst | 0
 .../airflow}/providers/tableau/__init__.py | 0
 .../providers/tableau/hooks}/__init__.py | 0
 .../providers/tableau/hooks/tableau.py | 0
 .../providers/tableau/operators/__init__.py | 0
 .../providers/tableau/operators/tableau.py | 0
 .../airflow}/providers/tableau/provider.yaml | 0
 .../providers/tableau/sensors/__init__.py | 0
 .../providers/tableau/sensors/tableau.py | 0
 .../telegram/.latest-doc-only-change.txt | 0
 .../airflow}/providers/telegram/CHANGELOG.rst | 0
 .../airflow}/providers/telegram/__init__.py | 0
 .../providers/telegram/hooks}/__init__.py | 0
 .../providers/telegram/hooks/telegram.py | 0
 .../providers/telegram/operators}/__init__.py | 0
 .../providers/telegram/operators/telegram.py | 0
 .../airflow}/providers/telegram/provider.yaml | 0
 .../teradata/.latest-doc-only-change.txt | 0
 .../airflow}/providers/teradata/CHANGELOG.rst | 0
 .../airflow}/providers/teradata/__init__.py | 0
 .../providers/teradata}/hooks/__init__.py | 0
 .../providers/teradata/hooks/teradata.py | 0
 .../providers/teradata}/operators/__init__.py | 0
 .../providers/teradata/operators/teradata.py | 0
 .../operators/teradata_compute_cluster.py | 0
 .../airflow}/providers/teradata/provider.yaml | 0
 .../providers/teradata/transfers/__init__.py | 0
 .../transfers/azure_blob_to_teradata.py | 0
 .../teradata/transfers/s3_to_teradata.py | 0
 .../transfers/teradata_to_teradata.py | 0
 .../providers/teradata/triggers/__init__.py | 0
 .../triggers/teradata_compute_cluster.py | 0
 .../providers/teradata/utils/__init__.py | 0
 .../providers/teradata/utils/constants.py | 0
 .../trino/.latest-doc-only-change.txt | 0
 .../airflow}/providers/trino/CHANGELOG.rst | 0
 .../src/airflow}/providers/trino/__init__.py | 0
 .../providers/trino/assets/__init__.py | 0
 .../airflow}/providers/trino/assets/trino.py | 0
 .../providers/trino}/hooks/__init__.py | 0
 .../airflow}/providers/trino/hooks/trino.py | 0
 .../providers/trino/operators/__init__.py | 0
 .../providers/trino/operators/trino.py | 0
 .../airflow}/providers/trino/provider.yaml | 0
 .../providers/trino/transfers}/__init__.py | 0
 .../providers/trino/transfers/gcs_to_trino.py | 0
 .../vertica/.latest-doc-only-change.txt | 0
 .../airflow}/providers/vertica/CHANGELOG.rst | 0
 .../airflow}/providers/vertica/__init__.py | 0
 .../providers/vertica}/hooks/__init__.py | 0
 .../providers/vertica/hooks/vertica.py | 0
 .../providers/vertica/operators}/__init__.py | 0
 .../providers/vertica/operators/vertica.py | 0
 .../airflow}/providers/vertica/provider.yaml | 0
 .../weaviate/.latest-doc-only-change.txt | 0
 .../airflow}/providers/weaviate/CHANGELOG.rst | 0
 .../airflow}/providers/weaviate/__init__.py | 0
 .../providers/weaviate/hooks/__init__.py | 0
 .../providers/weaviate/hooks/weaviate.py | 0
 .../providers/weaviate/operators/__init__.py | 0
 .../providers/weaviate/operators/weaviate.py | 0
 .../airflow}/providers/weaviate/provider.yaml | 0
 .../yandex/.latest-doc-only-change.txt | 0
 .../airflow}/providers/yandex/CHANGELOG.rst | 0
 .../src/airflow}/providers/yandex/__init__.py | 0
 .../providers/yandex/hooks/__init__.py | 0
 .../providers/yandex/hooks/dataproc.py | 0
 .../airflow}/providers/yandex/hooks/yandex.py | 0
 .../yandex/hooks/yandexcloud_dataproc.py | 0
 .../src/airflow}/providers/yandex/hooks/yq.py | 0
 .../providers/yandex/links/__init__.py | 0
 .../src/airflow}/providers/yandex/links/yq.py | 0
 .../providers/yandex/operators/__init__.py | 0
 .../providers/yandex/operators/dataproc.py | 0
 .../yandex/operators/yandexcloud_dataproc.py | 0
 .../airflow}/providers/yandex/operators/yq.py | 0
 .../airflow}/providers/yandex/provider.yaml | 0
 .../providers/yandex/secrets/__init__.py | 0
 .../providers/yandex/secrets/lockbox.py | 0
 .../providers/yandex/utils/__init__.py | 0
 .../providers/yandex/utils/credentials.py | 0
 .../providers/yandex/utils/defaults.py | 0
 .../airflow}/providers/yandex/utils/fields.py | 0
 .../providers/yandex/utils/user_agent.py | 0
 .../src/airflow}/providers/ydb/CHANGELOG.rst | 0
 .../src/airflow}/providers/ydb/__init__.py | 0
 .../airflow}/providers/ydb/hooks/__init__.py | 0
 .../providers/ydb/hooks/_vendor/__init__.py | 0
 .../ydb/hooks/_vendor/dbapi/__init__.py | 0
 .../ydb/hooks/_vendor/dbapi/connection.py | 0
 .../ydb/hooks/_vendor/dbapi/constants.py | 0
 .../ydb/hooks/_vendor/dbapi/cursor.py | 0
 .../ydb/hooks/_vendor/dbapi/errors.py | 0
 .../providers/ydb/hooks/_vendor/readme.md | 0
 .../src/airflow}/providers/ydb/hooks/ydb.py | 0
 .../providers/ydb/operators/__init__.py | 0
 .../airflow}/providers/ydb/operators/ydb.py | 0
 .../src/airflow}/providers/ydb/provider.yaml | 0
 .../airflow}/providers/ydb/utils/__init__.py | 0
 .../providers/ydb/utils/credentials.py | 0
 .../airflow}/providers/ydb/utils/defaults.py | 0
 .../zendesk/.latest-doc-only-change.txt | 0
 .../airflow}/providers/zendesk/CHANGELOG.rst | 0
 .../airflow}/providers/zendesk/__init__.py | 0
 .../providers/zendesk}/hooks/__init__.py | 0
 .../providers/zendesk/hooks/zendesk.py | 0
 .../airflow}/providers/zendesk/provider.yaml | 0
 .../tests}/__init__.py | 0
 .../tests/airbyte}/__init__.py | 0
 .../tests/airbyte/hooks}/__init__.py | 0
 .../tests}/airbyte/hooks/test_airbyte.py | 0
 .../tests/airbyte/operators}/__init__.py | 0
 .../tests}/airbyte/operators/test_airbyte.py | 0
 .../tests/airbyte/sensors}/__init__.py | 0
 .../tests}/airbyte/sensors/test_airbyte.py | 0
 .../tests/airbyte/triggers}/__init__.py | 0
 .../tests}/airbyte/triggers/test_airbyte.py | 0
 .../tests/alibaba}/__init__.py | 0
 .../tests/alibaba/cloud}/__init__.py | 0
 .../tests/alibaba/cloud/hooks}/__init__.py | 0
 .../cloud/hooks/test_analyticdb_spark.py | 3 +-
 .../tests}/alibaba/cloud/hooks/test_oss.py | 3 +-
 .../tests/alibaba/cloud/log}/__init__.py | 0
 .../cloud/log/test_oss_task_handler.py | 5 +-
 .../alibaba/cloud/operators}/__init__.py | 0
 .../cloud/operators/test_analyticdb_spark.py | 0
 .../alibaba/cloud/operators/test_oss.py | 0
 .../tests/alibaba/cloud/sensors}/__init__.py | 0
 .../cloud/sensors/test_analyticdb_spark.py | 0
 .../alibaba/cloud/sensors/test_oss_key.py | 0
 .../tests/alibaba/cloud/utils}/__init__.py | 0
 .../cloud/utils/analyticdb_spark_mock.py | 0
 .../tests}/alibaba/cloud/utils/oss_mock.py | 0
 .../tests}/alibaba/cloud/utils/test_utils.py | 0
 .../tests/amazon}/__init__.py | 0
 .../tests}/amazon/aws/.gitignore | 0
 .../tests/amazon/aws}/__init__.py | 0
 .../tests/amazon/aws/assets}/__init__.py | 0
 .../tests}/amazon/aws/assets/test_s3.py | 0
 .../amazon/aws/auth_manager}/__init__.py | 0
 .../amazon/aws/auth_manager/avp}/__init__.py | 0
 .../aws/auth_manager/avp/test_entities.py | 0
 .../aws/auth_manager/avp/test_facade.py | 11 +-
 .../amazon/aws/auth_manager/cli}/__init__.py | 0
 .../aws/auth_manager/cli/test_avp_commands.py | 5 +-
 .../aws/auth_manager/cli/test_definition.py | 0
 .../security_manager}/__init__.py | 0
 .../test_aws_security_manager_override.py | 2 +-
 .../aws/auth_manager/test_aws_auth_manager.py | 26 +-
 .../amazon/aws/auth_manager/test_constants.py | 0
 .../amazon/aws/auth_manager/test_user.py | 0
 .../aws/auth_manager/views}/__init__.py | 0
 .../aws/auth_manager/views/test_auth.py | 54 +-
 .../amazon/aws/config_templates}/__init__.py | 0
 .../amazon/aws/config_templates/args.json | 0
 .../amazon/aws/config_templates/job.j2.json | 0
 .../amazon/aws/config_templates/steps.j2.json | 0
 .../amazon/aws/config_templates/steps.json | 0
 .../tests/amazon/aws/deferrable}/__init__.py | 0
 .../amazon/aws/deferrable/hooks}/__init__.py | 0
 .../tests/amazon/aws/executors}/__init__.py | 0
 .../amazon/aws/executors/batch}/__init__.py | 0
 .../executors/batch/test_batch_executor.py | 11 +-
 .../amazon/aws/executors/ecs}/__init__.py | 0
 .../aws/executors/ecs/test_ecs_executor.py | 13 +-
 .../amazon/aws/executors/utils}/__init__.py | 0
 .../utils/test_exponential_backoff_retry.py | 0
 .../tests/amazon/aws/fs}/__init__.py | 0
 .../tests}/amazon/aws/fs/test_s3.py | 0
 .../tests/amazon/aws/hooks}/__init__.py | 0
 .../tests}/amazon/aws/hooks/test_appflow.py | 0
 .../tests}/amazon/aws/hooks/test_athena.py | 0
 .../amazon/aws/hooks/test_athena_sql.py | 0
 .../tests}/amazon/aws/hooks/test_base_aws.py | 33 +-
 .../amazon/aws/hooks/test_batch_client.py | 12 +-
 .../amazon/aws/hooks/test_batch_waiters.py | 13 +-
 .../tests}/amazon/aws/hooks/test_bedrock.py | 0
 .../tests}/amazon/aws/hooks/test_chime.py | 0
 .../amazon/aws/hooks/test_cloud_formation.py | 0
 .../amazon/aws/hooks/test_comprehend.py | 0
 .../tests}/amazon/aws/hooks/test_datasync.py | 0
 .../tests}/amazon/aws/hooks/test_dms.py | 0
 .../tests}/amazon/aws/hooks/test_dynamodb.py | 0
 .../tests}/amazon/aws/hooks/test_ec2.py | 0
 .../tests}/amazon/aws/hooks/test_ecr.py | 0
 .../tests}/amazon/aws/hooks/test_ecs.py | 0
 .../tests}/amazon/aws/hooks/test_eks.py | 5 +-
 .../test_elasticache_replication_group.py | 0
 .../tests}/amazon/aws/hooks/test_emr.py | 0
 .../amazon/aws/hooks/test_emr_containers.py | 0
 .../amazon/aws/hooks/test_emr_serverless.py | 0
 .../amazon/aws/hooks/test_eventbridge.py | 0
 .../tests}/amazon/aws/hooks/test_glacier.py | 0
 .../tests}/amazon/aws/hooks/test_glue.py | 0
 .../amazon/aws/hooks/test_glue_catalog.py | 0
 .../amazon/aws/hooks/test_glue_crawler.py | 0
 .../amazon/aws/hooks/test_glue_databrew.py | 0
 .../amazon/aws/hooks/test_hooks_signature.py | 0
 .../tests}/amazon/aws/hooks/test_kinesis.py | 0
 .../aws/hooks/test_kinesis_analytics.py | 0
 .../amazon/aws/hooks/test_lambda_function.py | 0
 .../tests}/amazon/aws/hooks/test_logs.py | 0
 .../tests}/amazon/aws/hooks/test_neptune.py | 0
 .../aws/hooks/test_opensearch_serverless.py | 0
 .../amazon/aws/hooks/test_quicksight.py | 0
 .../tests}/amazon/aws/hooks/test_rds.py | 0
 .../amazon/aws/hooks/test_redshift_cluster.py | 0
 .../amazon/aws/hooks/test_redshift_data.py | 0
 .../amazon/aws/hooks/test_redshift_sql.py | 0
 .../tests}/amazon/aws/hooks/test_s3.py | 3 +-
 .../tests}/amazon/aws/hooks/test_sagemaker.py | 0
 .../amazon/aws/hooks/test_secrets_manager.py | 0
 .../tests}/amazon/aws/hooks/test_ses.py | 0
 .../tests}/amazon/aws/hooks/test_sns.py | 0
 .../tests}/amazon/aws/hooks/test_sqs.py | 0
 .../tests}/amazon/aws/hooks/test_ssm.py | 0
 .../amazon/aws/hooks/test_step_function.py | 0
 .../tests}/amazon/aws/hooks/test_sts.py | 0
 .../aws/hooks/test_verified_permissions.py | 0
 .../amazon/aws/infrastructure}/__init__.py | 0
 .../example_s3_to_redshift}/__init__.py | 0
 .../example_s3_to_redshift/outputs.tf | 0
 .../example_s3_to_redshift/resources.tf | 0
 .../example_s3_to_redshift/variables.tf | 0
 .../tests/amazon/aws/links}/__init__.py | 0
 .../tests}/amazon/aws/links/test_athena.py | 3 +-
 .../tests}/amazon/aws/links/test_base_aws.py | 5 +-
 .../tests}/amazon/aws/links/test_batch.py | 3 +-
 .../tests}/amazon/aws/links/test_emr.py | 3 +-
 .../tests}/amazon/aws/links/test_glue.py | 3 +-
 .../tests}/amazon/aws/links/test_logs.py | 3 +-
 .../amazon/aws/links/test_step_function.py | 3 +-
 .../tests/amazon/aws/log}/__init__.py | 0
 .../aws/log/test_cloudwatch_task_handler.py | 3 +-
 .../amazon/aws/log/test_s3_task_handler.py | 3 +-
 .../amazon/aws/notifications}/__init__.py | 0
 .../amazon/aws/notifications/test_chime.py | 0
 .../amazon/aws/notifications/test_sns.py | 0
 .../amazon/aws/notifications/test_sqs.py | 0
 .../tests/amazon/aws/operators}/__init__.py | 0
 .../amazon/aws/operators/athena_metadata.json | 0
 .../amazon/aws/operators/test_appflow.py | 0
 .../amazon/aws/operators/test_athena.py | 6 +-
 .../amazon/aws/operators/test_base_aws.py | 0
 .../tests}/amazon/aws/operators/test_batch.py | 0
 .../amazon/aws/operators/test_bedrock.py | 3 +-
 .../aws/operators/test_cloud_formation.py | 3 +-
 .../amazon/aws/operators/test_comprehend.py | 3 +-
 .../amazon/aws/operators/test_datasync.py | 3 +-
 .../tests}/amazon/aws/operators/test_dms.py | 3 +-
 .../tests}/amazon/aws/operators/test_ec2.py | 3 +-
 .../tests}/amazon/aws/operators/test_ecs.py | 3 +-
 .../tests}/amazon/aws/operators/test_eks.py | 9 +-
 .../aws/operators/test_emr_add_steps.py | 10 +-
 .../aws/operators/test_emr_containers.py | 3 +-
 .../aws/operators/test_emr_create_job_flow.py | 12 +-
 .../aws/operators/test_emr_modify_cluster.py | 3 +-
 .../operators/test_emr_notebook_execution.py | 5 +-
 .../aws/operators/test_emr_serverless.py | 3 +-
 .../operators/test_emr_terminate_job_flow.py | 3 +-
 .../amazon/aws/operators/test_eventbridge.py | 3 +-
 .../amazon/aws/operators/test_glacier.py | 3 +-
 .../tests}/amazon/aws/operators/test_glue.py | 3 +-
 .../amazon/aws/operators/test_glue_crawler.py | 3 +-
 .../aws/operators/test_glue_databrew.py | 3 +-
 .../aws/operators/test_kinesis_analytics.py | 3 +-
 .../aws/operators/test_lambda_function.py | 3 +-
 .../amazon/aws/operators/test_neptune.py | 3 +-
 .../amazon/aws/operators/test_quicksight.py | 3 +-
 .../tests}/amazon/aws/operators/test_rds.py | 3 +-
 .../aws/operators/test_redshift_cluster.py | 3 +-
 .../aws/operators/test_redshift_data.py | 3 +-
 .../amazon/aws/operators/test_redshift_sql.py | 0
 .../tests}/amazon/aws/operators/test_s3.py | 3 +-
 .../aws/operators/test_sagemaker_base.py | 3 +-
 .../aws/operators/test_sagemaker_endpoint.py | 3 +-
 .../test_sagemaker_endpoint_config.py | 3 +-
 .../aws/operators/test_sagemaker_model.py | 3 +-
 .../aws/operators/test_sagemaker_notebook.py | 3 +-
 .../aws/operators/test_sagemaker_pipeline.py | 3 +-
 .../operators/test_sagemaker_processing.py | 3 +-
 .../aws/operators/test_sagemaker_training.py | 3 +-
 .../aws/operators/test_sagemaker_transform.py | 3 +-
 .../aws/operators/test_sagemaker_tuning.py | 3 +-
 .../tests}/amazon/aws/operators/test_sns.py | 3 +-
 .../tests}/amazon/aws/operators/test_sqs.py | 3 +-
 .../aws/operators/test_step_function.py | 16 +-
 .../tests/amazon/aws/secrets}/__init__.py | 0
 .../aws/secrets/test_secrets_manager.py | 0
 .../aws/secrets/test_systems_manager.py | 3 +-
 .../tests/amazon/aws/sensors}/__init__.py | 0
 .../tests}/amazon/aws/sensors/test_athena.py | 0
 .../amazon/aws/sensors/test_base_aws.py | 0
 .../tests}/amazon/aws/sensors/test_batch.py | 0
 .../tests}/amazon/aws/sensors/test_bedrock.py | 0
 .../aws/sensors/test_cloud_formation.py | 0
 .../amazon/aws/sensors/test_comprehend.py | 0
 .../tests}/amazon/aws/sensors/test_dms.py | 0
 .../amazon/aws/sensors/test_dynamodb.py | 0
 .../tests}/amazon/aws/sensors/test_ec2.py | 0
 .../tests}/amazon/aws/sensors/test_ecs.py | 0
 .../tests}/amazon/aws/sensors/test_eks.py | 0
 .../amazon/aws/sensors/test_emr_base.py | 0
 .../amazon/aws/sensors/test_emr_containers.py | 0
 .../amazon/aws/sensors/test_emr_job_flow.py | 0
 .../sensors/test_emr_notebook_execution.py | 0
 .../test_emr_serverless_application.py | 0
 .../aws/sensors/test_emr_serverless_job.py | 0
 .../amazon/aws/sensors/test_emr_step.py | 0
 .../tests}/amazon/aws/sensors/test_glacier.py | 0
 .../tests}/amazon/aws/sensors/test_glue.py | 0
 .../sensors/test_glue_catalog_partition.py | 0
 .../amazon/aws/sensors/test_glue_crawler.py | 0
 .../aws/sensors/test_glue_data_quality.py | 0
 .../aws/sensors/test_kinesis_analytics.py | 0
 .../aws/sensors/test_lambda_function.py | 0
 .../aws/sensors/test_opensearch_serverless.py | 0
 .../amazon/aws/sensors/test_quicksight.py | 0
 .../tests}/amazon/aws/sensors/test_rds.py | 0
 .../aws/sensors/test_redshift_cluster.py | 0
 .../tests}/amazon/aws/sensors/test_s3.py | 0
 .../aws/sensors/test_sagemaker_automl.py | 0
 .../amazon/aws/sensors/test_sagemaker_base.py | 0
 .../aws/sensors/test_sagemaker_endpoint.py | 0
 .../aws/sensors/test_sagemaker_pipeline.py | 0
 .../aws/sensors/test_sagemaker_training.py | 0
 .../aws/sensors/test_sagemaker_transform.py | 0
 .../aws/sensors/test_sagemaker_tuning.py | 0
 .../tests}/amazon/aws/sensors/test_sqs.py | 0
 .../amazon/aws/sensors/test_step_function.py | 0
 .../tests/amazon/aws/system}/__init__.py | 0
 .../amazon/aws/system/utils}/__init__.py | 0
 .../amazon/aws/system/utils/test_helpers.py | 6 +-
 .../tests/amazon/aws/transfers}/__init__.py | 0
 .../aws/transfers/test_azure_blob_to_s3.py | 0
 .../tests}/amazon/aws/transfers/test_base.py | 0
 .../aws/transfers/test_dynamodb_to_s3.py | 0
 .../amazon/aws/transfers/test_exasol_to_s3.py | 0
 .../amazon/aws/transfers/test_ftp_to_s3.py | 0
 .../amazon/aws/transfers/test_gcs_to_s3.py | 0
 .../aws/transfers/test_glacier_to_gcs.py | 0
 .../aws/transfers/test_google_api_to_s3.py | 0
 .../aws/transfers/test_hive_to_dynamodb.py | 0
 .../amazon/aws/transfers/test_http_to_s3.py | 0
 .../transfers/test_imap_attachment_to_s3.py | 0
 .../amazon/aws/transfers/test_local_to_s3.py | 0
 .../amazon/aws/transfers/test_mongo_to_s3.py | 0
 .../aws/transfers/test_redshift_to_s3.py | 3 +-
 .../aws/transfers/test_s3_to_dynamodb.py | 0
 .../amazon/aws/transfers/test_s3_to_ftp.py | 0
 .../aws/transfers/test_s3_to_redshift.py | 3 +-
 .../amazon/aws/transfers/test_s3_to_sftp.py | 3 +-
 .../amazon/aws/transfers/test_s3_to_sql.py | 0
 .../aws/transfers/test_salesforce_to_s3.py | 0
 .../amazon/aws/transfers/test_sftp_to_s3.py | 3 +-
 .../amazon/aws/transfers/test_sql_to_s3.py | 0
 .../tests/amazon/aws/triggers}/__init__.py | 0
 .../tests}/amazon/aws/triggers/test_athena.py | 0
 .../tests}/amazon/aws/triggers/test_base.py | 0
 .../tests}/amazon/aws/triggers/test_batch.py | 0
 .../amazon/aws/triggers/test_bedrock.py | 3 +-
 .../amazon/aws/triggers/test_comprehend.py | 3 +-
 .../tests}/amazon/aws/triggers/test_ec2.py | 0
 .../tests}/amazon/aws/triggers/test_ecs.py | 0
 .../tests}/amazon/aws/triggers/test_eks.py | 0
 .../tests}/amazon/aws/triggers/test_emr.py | 0
 .../tests}/amazon/aws/triggers/test_glue.py | 3 +-
 .../amazon/aws/triggers/test_glue_crawler.py | 3 +-
 .../amazon/aws/triggers/test_glue_databrew.py | 0
 .../aws/triggers/test_kinesis_analytics.py | 3 +-
 .../aws/triggers/test_lambda_function.py | 0
 .../amazon/aws/triggers/test_neptune.py | 0
 .../triggers/test_opensearch_serverless.py | 3 +-
 .../tests}/amazon/aws/triggers/test_rds.py | 0
 .../aws/triggers/test_redshift_cluster.py | 0
 .../amazon/aws/triggers/test_redshift_data.py | 0
 .../tests}/amazon/aws/triggers/test_s3.py | 0
 .../amazon/aws/triggers/test_sagemaker.py | 0
 .../amazon/aws/triggers/test_serialization.py | 0
 .../tests}/amazon/aws/triggers/test_sqs.py | 0
 .../tests/amazon/aws/utils}/__init__.py | 0
 .../amazon/aws/utils/eks_test_constants.py | 0
 .../tests}/amazon/aws/utils/eks_test_utils.py | 2 +-
 .../aws/utils/test_connection_wrapper.py | 0
 .../amazon/aws/utils/test_eks_get_token.py | 4 +-
 .../tests}/amazon/aws/utils/test_emailer.py | 0
 .../amazon/aws/utils/test_identifiers.py | 0
 .../tests}/amazon/aws/utils/test_mixins.py | 0
 .../amazon/aws/utils/test_openlineage.py | 0
 .../tests}/amazon/aws/utils/test_redshift.py | 0
 .../tests}/amazon/aws/utils/test_sqs.py | 0
 .../tests}/amazon/aws/utils/test_suppress.py | 0
 .../tests}/amazon/aws/utils/test_tags.py | 0
 .../amazon/aws/utils/test_task_log_fetcher.py | 0
 .../amazon/aws/utils/test_template_fields.py | 0
 .../tests}/amazon/aws/utils/test_utils.py | 0
 .../tests}/amazon/aws/utils/test_waiter.py | 0
 .../aws/utils/test_waiter_with_logging.py | 0
 .../tests/amazon/aws/waiters}/__init__.py | 0
 .../tests}/amazon/aws/waiters/test.json | 0
 .../tests}/amazon/aws/waiters/test_batch.py | 0
 .../tests}/amazon/aws/waiters/test_bedrock.py | 0
 .../amazon/aws/waiters/test_bedrock_agent.py | 0
 .../amazon/aws/waiters/test_comprehend.py | 0
 .../amazon/aws/waiters/test_custom_waiters.py | 0
.../tests}/amazon/aws/waiters/test_dynamo.py | 0 .../tests}/amazon/aws/waiters/test_ecs.py | 0 .../tests}/amazon/aws/waiters/test_eks.py | 3 +- .../tests}/amazon/aws/waiters/test_emr.py | 0 .../tests}/amazon/aws/waiters/test_glue.py | 0 .../amazon/aws/waiters/test_glue_databrew.py | 0 .../aws/waiters/test_kinesis_analytics.py | 0 .../tests}/amazon/aws/waiters/test_neptune.py | 0 .../aws/waiters/test_opensearch_serverless.py | 0 .../tests}/amazon/conftest.py | 0 .../tests/apache}/__init__.py | 0 .../tests/apache/beam}/__init__.py | 0 .../tests/apache/beam/hooks}/__init__.py | 0 .../tests}/apache/beam/hooks/test_beam.py | 0 .../tests/apache/beam/operators}/__init__.py | 0 .../tests}/apache/beam/operators/test_beam.py | 0 .../tests/apache/beam/triggers}/__init__.py | 0 .../tests}/apache/beam/triggers/test_beam.py | 0 .../tests/apache/cassandra}/__init__.py | 0 .../apache/cassandra}/sensors/__init__.py | 0 .../apache/cassandra/sensors/test_record.py | 0 .../apache/cassandra/sensors/test_table.py | 0 .../tests/apache/drill}/__init__.py | 0 .../tests/apache/drill}/hooks/__init__.py | 0 .../tests}/apache/drill/hooks/test_drill.py | 7 +- .../tests/apache/druid}/__init__.py | 0 .../tests/apache/druid}/hooks/__init__.py | 0 .../tests}/apache/druid/hooks/test_druid.py | 0 .../tests/apache/druid/operators}/__init__.py | 0 .../apache/druid/operators/test_druid.py | 0 .../tests/apache/druid/transfers}/__init__.py | 0 .../druid/transfers/test_hive_to_druid.py | 0 .../tests/apache/flink}/__init__.py | 0 .../tests/apache/flink}/operators/__init__.py | 0 .../flink/operators/test_flink_kubernetes.py | 0 .../tests/apache/flink/sensors}/__init__.py | 0 .../flink/sensors/test_flink_kubernetes.py | 0 .../tests/apache/hdfs}/__init__.py | 0 .../tests/apache/hdfs/hooks}/__init__.py | 0 .../tests}/apache/hdfs/hooks/test_webhdfs.py | 0 .../tests/apache/hdfs/sensors}/__init__.py | 0 .../apache/hdfs/sensors/test_web_hdfs.py | 0 .../tests}/apache/hive/__init__.py | 0 .../tests/apache/hive}/hooks/__init__.py | 0 .../apache/hive/hooks/query_results.csv | 0 .../tests}/apache/hive/hooks/test_hive.py | 27 +- .../tests/apache/hive/macros}/__init__.py | 0 .../tests}/apache/hive/macros/test_hive.py | 0 .../tests/apache/hive/operators}/__init__.py | 0 .../tests}/apache/hive/operators/test_hive.py | 3 +- .../apache/hive/operators/test_hive_stats.py | 3 +- .../tests/apache/hive/sensors}/__init__.py | 0 .../hive/sensors/test_hive_partition.py | 3 +- .../hive/sensors/test_metastore_partition.py | 3 +- .../hive/sensors/test_named_hive_partition.py | 3 +- .../tests/apache/hive/transfers}/__init__.py | 0 .../hive/transfers/test_hive_to_mysql.py | 3 +- .../hive/transfers/test_hive_to_samba.py | 3 +- .../hive/transfers/test_mssql_to_hive.py | 0 .../hive/transfers/test_mysql_to_hive.py | 0 .../apache/hive/transfers/test_s3_to_hive.py | 0 .../hive/transfers/test_vertica_to_hive.py | 0 .../tests/apache/iceberg}/__init__.py | 0 .../tests/apache/iceberg/hooks}/__init__.py | 0 .../apache/iceberg/hooks/test_iceberg.py | 0 .../tests/apache/impala}/__init__.py | 0 .../tests/apache/impala/hooks}/__init__.py | 0 .../tests}/apache/impala/hooks/test_impala.py | 0 .../tests/apache/kafka}/__init__.py | 0 .../tests/apache/kafka/hooks}/__init__.py | 0 .../tests}/apache/kafka/hooks/test_base.py | 0 .../tests}/apache/kafka/hooks/test_client.py | 0 .../tests}/apache/kafka/hooks/test_consume.py | 0 .../tests}/apache/kafka/hooks/test_produce.py | 0 .../tests/apache/kafka/operators}/__init__.py | 0 .../apache/kafka/operators/test_consume.py | 2 +- 
.../apache/kafka/operators/test_produce.py | 2 +- .../tests/apache/kafka/sensors}/__init__.py | 0 .../tests}/apache/kafka/sensors/test_kafka.py | 0 .../tests/apache/kafka/triggers}/__init__.py | 0 .../kafka/triggers/test_await_message.py | 4 +- .../tests/apache/kylin}/__init__.py | 0 .../tests/apache/kylin/hooks}/__init__.py | 0 .../tests}/apache/kylin/hooks/test_kylin.py | 0 .../tests/apache/kylin/operators}/__init__.py | 0 .../apache/kylin/operators/test_kylin_cube.py | 0 .../tests/apache/livy}/__init__.py | 0 .../tests/apache/livy/hooks}/__init__.py | 0 .../tests}/apache/livy/hooks/test_livy.py | 3 +- .../tests/apache/livy/operators}/__init__.py | 0 .../tests}/apache/livy/operators/test_livy.py | 0 .../tests/apache/livy/sensors}/__init__.py | 0 .../tests}/apache/livy/sensors/test_livy.py | 0 .../tests/apache/livy/triggers}/__init__.py | 0 .../tests}/apache/livy/triggers/test_livy.py | 0 .../tests/apache/pig}/__init__.py | 0 .../tests/apache/pig/hooks}/__init__.py | 0 .../tests}/apache/pig/hooks/test_pig.py | 0 .../tests/apache/pig/operators}/__init__.py | 0 .../tests}/apache/pig/operators/test_pig.py | 0 .../tests/apache/pinot}/__init__.py | 0 .../tests/apache/pinot}/hooks/__init__.py | 0 .../tests}/apache/pinot/hooks/test_pinot.py | 0 .../tests/apache/spark}/__init__.py | 0 .../apache/spark/decorators}/__init__.py | 0 .../apache/spark/decorators/test_pyspark.py | 0 .../tests/apache/spark}/hooks/__init__.py | 0 .../apache/spark/hooks/test_spark_connect.py | 0 .../apache/spark/hooks/test_spark_jdbc.py | 0 .../spark/hooks/test_spark_jdbc_script.py | 0 .../apache/spark/hooks/test_spark_sql.py | 3 +- .../apache/spark/hooks/test_spark_submit.py | 0 .../tests/apache/spark/operators}/__init__.py | 0 .../apache/spark/operators/test_spark_jdbc.py | 0 .../apache/spark/operators/test_spark_sql.py | 0 .../spark/operators/test_spark_submit.py | 0 .../tests/apprise}/__init__.py | 0 .../tests/apprise}/hooks/__init__.py | 0 .../tests}/apprise/hooks/test_apprise.py | 0 .../tests/apprise/notifications}/__init__.py | 0 .../apprise/notifications/test_apprise.py | 0 .../tests/arangodb}/__init__.py | 0 .../tests/arangodb/hooks}/__init__.py | 0 .../tests}/arangodb/hooks/test_arangodb.py | 0 .../tests/arangodb}/operators/__init__.py | 0 .../arangodb/operators/test_arangodb.py | 0 .../tests/arangodb/sensors}/__init__.py | 0 .../tests}/arangodb/sensors/test_arangodb.py | 0 .../tests/asana}/__init__.py | 0 .../tests/asana/hooks}/__init__.py | 0 .../tests}/asana/hooks/test_asana.py | 0 .../tests/asana/operators}/__init__.py | 0 .../asana/operators/test_asana_tasks.py | 0 .../tests/atlassian}/__init__.py | 0 .../tests/atlassian/jira}/__init__.py | 0 .../tests/atlassian/jira/hooks}/__init__.py | 0 .../tests}/atlassian/jira/hooks/test_jira.py | 3 +- .../atlassian/jira/notifications}/__init__.py | 0 .../atlassian/jira/notifications/test_jira.py | 0 .../atlassian/jira/operators}/__init__.py | 0 .../atlassian/jira/operators/test_jira.py | 3 +- .../tests/atlassian/jira/sensors}/__init__.py | 0 .../atlassian/jira/sensors/test_jira.py | 3 +- .../tests/celery}/__init__.py | 0 .../tests/celery/cli}/__init__.py | 0 .../tests}/celery/cli/test_celery_command.py | 5 +- .../tests/celery/executors}/__init__.py | 0 .../celery/executors/test_celery_executor.py | 21 +- .../test_celery_kubernetes_executor.py | 0 .../tests/celery/log_handlers}/__init__.py | 0 .../celery/log_handlers/test_log_handlers.py | 5 +- .../tests/celery/sensors}/__init__.py | 0 .../celery/sensors/test_celery_queue.py | 0 .../tests/cloudant}/__init__.py | 0 
.../tests/cloudant/hooks}/__init__.py | 0 .../tests}/cloudant/hooks/test_cloudant.py | 0 .../beam => providers/tests/cncf}/__init__.py | 0 .../tests/cncf/kubernetes}/__init__.py | 0 .../tests/cncf/kubernetes/cli}/__init__.py | 0 .../kubernetes/cli/test_kubernetes_command.py | 3 +- .../tests}/cncf/kubernetes/conftest.py | 0 .../cncf/kubernetes/data_files}/__init__.py | 0 .../data_files/executor}/__init__.py | 0 .../data_files/executor/basic_template.yaml | 0 .../cncf/kubernetes/data_files/kube_config | 0 .../kubernetes/data_files/pods}/__init__.py | 0 .../data_files/pods/generator_base.yaml | 0 .../pods/generator_base_with_secrets.yaml | 0 .../kubernetes/data_files/pods/template.yaml | 0 .../kubernetes/data_files/spark}/__init__.py | 0 .../spark/application_template.yaml | 0 .../data_files/spark/application_test.json | 0 .../data_files/spark/application_test.yaml | 0 ...ication_test_with_no_name_from_config.json | 0 ...ication_test_with_no_name_from_config.yaml | 0 .../cncf/kubernetes/decorators}/__init__.py | 0 .../kubernetes/decorators/test_kubernetes.py | 0 .../cncf/kubernetes/executors}/__init__.py | 0 .../executors/test_kubernetes_executor.py | 20 +- .../test_local_kubernetes_executor.py | 0 .../tests/cncf/kubernetes/hooks}/__init__.py | 0 .../cncf/kubernetes/hooks/test_kubernetes.py | 10 +- .../cncf/kubernetes/log_handlers}/__init__.py | 0 .../log_handlers/test_log_handlers.py | 5 +- .../tests/cncf/kubernetes/models}/__init__.py | 0 .../cncf/kubernetes/models/test_secret.py | 0 .../cncf/kubernetes/operators}/__init__.py | 0 .../operators/test_custom_object_launcher.py | 0 .../cncf/kubernetes/operators/test_job.py | 0 .../cncf/kubernetes/operators/test_pod.py | 9 +- .../kubernetes/operators/test_resource.py | 0 .../operators/test_spark_kubernetes.py | 3 +- .../kubernetes/resource_convert}/__init__.py | 0 .../resource_convert/test_configmap.py | 0 .../resource_convert/test_env_variable.py | 0 .../resource_convert/test_secret.py | 0 .../cncf/kubernetes/sensors}/__init__.py | 0 .../sensors/test_spark_kubernetes.py | 0 .../tests}/cncf/kubernetes/test_callbacks.py | 0 .../tests}/cncf/kubernetes/test_client.py | 3 +- .../test_kubernetes_helper_functions.py | 0 .../cncf/kubernetes/test_pod_generator.py | 0 .../kubernetes/test_template_rendering.py | 10 +- .../cncf/kubernetes/triggers}/__init__.py | 0 .../cncf/kubernetes/triggers/test_job.py | 0 .../cncf/kubernetes/triggers/test_pod.py | 0 .../tests/cncf/kubernetes/utils}/__init__.py | 0 .../utils/test_k8s_resource_iterator.py | 0 .../cncf/kubernetes/utils/test_pod_manager.py | 3 +- .../tests/cohere}/__init__.py | 0 .../tests/cohere/hooks}/__init__.py | 0 .../tests}/cohere/hooks/test_cohere.py | 13 +- .../tests/cohere/operators}/__init__.py | 0 .../tests}/cohere/operators/test_embedding.py | 0 .../tests/common}/__init__.py | 0 .../tests/common/compat}/__init__.py | 0 .../tests/common/compat/lineage}/__init__.py | 0 .../tests}/common/compat/lineage/test_hook.py | 0 .../common/compat/openlineage}/__init__.py | 0 .../common/compat/openlineage/test_facet.py | 0 .../compat/openlineage/utils}/__init__.py | 0 .../compat/openlineage/utils/test_utils.py | 0 .../tests/common/compat/security}/__init__.py | 0 .../compat/security/test_permissions.py | 0 .../tests/common/io}/__init__.py | 0 .../tests/common/io/assets}/__init__.py | 0 .../tests}/common/io/assets/test_file.py | 0 .../tests/common/io/operators}/__init__.py | 0 .../common/io/operators/test_file_transfer.py | 3 +- .../tests/common/io/xcom}/__init__.py | 0 
.../tests}/common/io/xcom/test_backend.py | 9 +- .../tests/common/sql}/__init__.py | 0 .../tests/common/sql/hooks}/__init__.py | 0 .../tests}/common/sql/hooks/test_dbapi.py | 3 +- .../tests}/common/sql/hooks/test_sql.py | 5 +- .../tests}/common/sql/hooks/test_sqlparse.py | 3 +- .../tests/common/sql/operators}/__init__.py | 0 .../tests}/common/sql/operators/test_sql.py | 3 +- .../common/sql/operators/test_sql_execute.py | 3 +- .../tests/common/sql/sensors}/__init__.py | 0 .../tests}/common/sql/sensors/test_sql.py | 3 +- .../tests}/common/sql/test_utils.py | 3 +- .../providers => providers/tests}/conftest.py | 13 +- .../tests/databricks}/__init__.py | 0 .../tests/databricks/hooks}/__init__.py | 0 .../databricks/hooks/test_databricks.py | 0 .../databricks/hooks/test_databricks_base.py | 0 .../databricks/hooks/test_databricks_sql.py | 7 +- .../tests/databricks/operators}/__init__.py | 0 .../databricks/operators/test_databricks.py | 0 .../operators/test_databricks_copy.py | 0 .../operators/test_databricks_repos.py | 0 .../operators/test_databricks_sql.py | 0 .../operators/test_databricks_workflow.py | 0 .../tests/databricks/plugins}/__init__.py | 0 .../plugins/test_databricks_workflow.py | 5 + .../tests/databricks/sensors}/__init__.py | 0 .../sensors/test_databricks_partition.py | 0 .../databricks/sensors/test_databricks_sql.py | 0 .../tests/databricks/triggers}/__init__.py | 0 .../databricks/triggers/test_databricks.py | 0 .../tests/databricks/utils}/__init__.py | 0 .../databricks/utils/test_databricks.py | 0 .../tests/datadog}/__init__.py | 0 .../tests/datadog/hooks}/__init__.py | 0 .../tests}/datadog/hooks/test_datadog.py | 0 .../tests/datadog/sensors}/__init__.py | 0 .../tests}/datadog/sensors/test_datadog.py | 0 .../cli => providers/tests/dbt}/__init__.py | 0 .../tests/dbt/cloud}/__init__.py | 0 .../tests/dbt/cloud/hooks}/__init__.py | 0 .../tests}/dbt/cloud/hooks/test_dbt.py | 5 +- .../tests/dbt/cloud/operators}/__init__.py | 0 .../tests}/dbt/cloud/operators/test_dbt.py | 5 +- .../tests/dbt/cloud/sensors}/__init__.py | 0 .../tests}/dbt/cloud/sensors/test_dbt.py | 0 .../tests/dbt/cloud/test_data}/__init__.py | 0 .../tests}/dbt/cloud/test_data/catalog.json | 0 .../tests}/dbt/cloud/test_data/job_run.json | 0 .../tests}/dbt/cloud/test_data/manifest.json | 0 .../dbt/cloud/test_data/run_results.json | 0 .../tests/dbt/cloud/triggers}/__init__.py | 0 .../tests}/dbt/cloud/triggers/test_dbt.py | 0 .../tests/dbt/cloud/utils}/__init__.py | 0 .../dbt/cloud/utils/test_openlineage.py | 0 providers/tests/deprecations_ignore.yml | 128 ++ .../tests/dingding}/__init__.py | 0 .../tests/dingding/hooks}/__init__.py | 0 .../tests}/dingding/hooks/test_dingding.py | 0 .../tests/dingding/operators}/__init__.py | 0 .../dingding/operators/test_dingding.py | 0 .../tests/discord}/__init__.py | 0 .../tests/discord/hooks}/__init__.py | 0 .../discord/hooks/test_discord_webhook.py | 0 .../tests/discord/notifications}/__init__.py | 0 .../discord/notifications/test_discord.py | 0 .../tests/discord/operators}/__init__.py | 0 .../discord/operators/test_discord_webhook.py | 0 .../tests/docker}/__init__.py | 0 .../tests}/docker/conftest.py | 0 .../tests/docker/decorators}/__init__.py | 0 .../tests}/docker/decorators/test_docker.py | 0 .../tests/docker/hooks}/__init__.py | 0 .../tests}/docker/hooks/test_docker.py | 0 .../tests/docker/operators}/__init__.py | 0 .../tests}/docker/operators/test_docker.py | 5 +- .../docker/operators/test_docker_swarm.py | 0 .../tests}/docker/test_exceptions.py | 0 .../tests/edge}/__init__.py | 0 
.../tests/edge/api_endpoints}/__init__.py | 0 .../api_endpoints/test_health_endpoint.py | 0 .../api_endpoints/test_rpc_api_endpoint.py | 5 +- .../tests/edge/cli}/__init__.py | 0 .../tests}/edge/cli/test_edge_command.py | 3 +- .../tests/edge/models}/__init__.py | 0 .../tests}/edge/models/test_edge_job.py | 0 .../tests}/edge/models/test_edge_logs.py | 0 .../tests}/edge/models/test_edge_worker.py | 0 .../tests/edge/plugins}/__init__.py | 0 .../edge/plugins/test_edge_executor_plugin.py | 3 +- .../tests/elasticsearch}/__init__.py | 0 .../tests/elasticsearch/hooks}/__init__.py | 0 .../elasticsearch/hooks/test_elasticsearch.py | 0 .../tests/elasticsearch/log}/__init__.py | 0 .../elasticsearch/log/elasticmock/__init__.py | 2 +- .../log/elasticmock/fake_elasticsearch.py | 2 +- .../log/elasticmock/utilities/__init__.py | 0 .../log/test_es_json_formatter.py | 0 .../elasticsearch/log/test_es_response.py | 0 .../elasticsearch/log/test_es_task_handler.py | 9 +- .../tests/email}/__init__.py | 0 .../tests/email/operators}/__init__.py | 0 .../tests/exasol}/__init__.py | 0 .../tests/exasol/hooks}/__init__.py | 0 .../tests}/exasol/hooks/test_exasol.py | 0 .../tests}/exasol/hooks/test_sql.py | 0 .../tests/exasol/operators}/__init__.py | 0 .../tests}/exasol/operators/test_exasol.py | 0 .../exasol/operators/test_exasol_sql.py | 0 .../hooks => providers/tests/fab}/__init__.py | 0 .../tests/fab/auth_manager}/__init__.py | 0 .../tests/fab/auth_manager/api}/__init__.py | 0 .../fab/auth_manager/api/auth}/__init__.py | 0 .../api/auth/backend}/__init__.py | 0 .../api/auth/backend/test_basic_auth.py | 3 +- .../api/auth/backend/test_kerberos_auth.py | 2 +- .../auth_manager/api_endpoints}/__init__.py | 0 .../api_endpoints/api_connexion_utils.py | 2 +- .../remote_user_api_auth_backend.py | 0 .../api_endpoints/test_asset_endpoint.py | 13 +- .../auth_manager/api_endpoints/test_auth.py | 10 +- .../api_endpoints/test_backfill_endpoint.py | 24 +- .../auth_manager/api_endpoints/test_cors.py | 6 +- .../api_endpoints/test_dag_endpoint.py | 9 +- .../api_endpoints/test_dag_run_endpoint.py | 15 +- .../api_endpoints/test_dag_source_endpoint.py | 7 +- .../test_dag_warning_endpoint.py | 7 +- .../api_endpoints/test_event_log_endpoint.py | 7 +- .../test_import_error_endpoint.py | 9 +- .../test_role_and_permission_endpoint.py | 11 +- .../test_role_and_permission_schema.py | 3 +- .../test_task_instance_endpoint.py | 7 +- .../api_endpoints/test_user_endpoint.py | 15 +- .../api_endpoints/test_user_schema.py | 7 +- .../api_endpoints/test_variable_endpoint.py | 7 +- .../api_endpoints/test_xcom_endpoint.py | 7 +- .../auth_manager/cli_commands}/__init__.py | 0 .../cli_commands/test_db_command.py | 0 .../cli_commands/test_definition.py | 2 +- .../cli_commands/test_role_command.py | 5 +- .../cli_commands/test_sync_perm_command.py | 3 +- .../cli_commands/test_user_command.py | 3 +- .../auth_manager/cli_commands/test_utils.py | 2 +- .../tests}/fab/auth_manager/conftest.py | 7 +- .../fab/auth_manager/decorators}/__init__.py | 0 .../fab/auth_manager/decorators/test_auth.py | 3 +- .../fab/auth_manager/models}/__init__.py | 0 .../models/test_anonymous_user.py | 2 +- .../tests}/fab/auth_manager/models/test_db.py | 13 +- .../security_manager}/__init__.py | 0 .../security_manager/test_constants.py | 2 +- .../security_manager/test_override.py | 2 +- .../fab/auth_manager/test_fab_auth_manager.py | 2 +- .../tests}/fab/auth_manager/test_models.py | 2 +- .../tests}/fab/auth_manager/test_security.py | 14 +- .../tests/fab/auth_manager/views}/__init__.py | 0 
.../auth_manager/views/test_permissions.py | 7 +- .../fab/auth_manager/views/test_roles_list.py | 7 +- .../fab/auth_manager/views/test_user.py | 7 +- .../fab/auth_manager/views/test_user_edit.py | 7 +- .../fab/auth_manager/views/test_user_stats.py | 7 +- .../tests/facebook}/__init__.py | 0 .../tests/facebook/ads}/__init__.py | 0 .../tests/facebook/ads/hooks}/__init__.py | 0 .../tests}/facebook/ads/hooks/test_ads.py | 0 .../tests/ftp}/__init__.py | 0 .../tests/ftp/hooks}/__init__.py | 0 .../tests}/ftp/hooks/test_ftp.py | 0 .../tests/ftp/operators}/__init__.py | 0 .../tests}/ftp/operators/test_ftp.py | 0 .../tests/ftp/sensors}/__init__.py | 0 .../tests}/ftp/sensors/test_ftp.py | 0 .../tests/github}/__init__.py | 0 .../tests/github/hooks}/__init__.py | 0 .../tests}/github/hooks/test_github.py | 0 .../tests/github/operators}/__init__.py | 0 .../tests}/github/operators/test_github.py | 0 .../tests/github/sensors}/__init__.py | 0 .../tests}/github/sensors/test_github.py | 0 .../tests/google}/__init__.py | 0 .../tests/google/ads}/__init__.py | 0 .../tests/google/ads/hooks}/__init__.py | 0 .../tests}/google/ads/hooks/test_ads.py | 0 .../tests/google/ads}/operators/__init__.py | 0 .../tests}/google/ads/operators/test_ads.py | 0 .../tests/google/ads/transfers}/__init__.py | 0 .../google/ads/transfers/test_ads_to_gcs.py | 3 +- .../tests/google/assets}/__init__.py | 0 .../tests/google/assets}/test_bigquery.py | 0 .../tests/google/cloud}/__init__.py | 0 .../cloud/_internal_client}/__init__.py | 0 .../test_secret_manager_client.py | 0 .../tests/google/cloud/hooks}/__init__.py | 0 .../tests}/google/cloud/hooks/test_automl.py | 3 +- .../google/cloud/hooks/test_bigquery.py | 0 .../google/cloud/hooks/test_bigquery_dts.py | 3 +- .../cloud/hooks/test_bigquery_system.py | 5 +- .../google/cloud/hooks/test_bigtable.py | 3 +- .../google/cloud/hooks/test_cloud_batch.py | 3 +- .../google/cloud/hooks/test_cloud_build.py | 3 +- .../google/cloud/hooks/test_cloud_composer.py | 0 .../cloud/hooks/test_cloud_memorystore.py | 3 +- .../google/cloud/hooks/test_cloud_run.py | 3 +- .../google/cloud/hooks/test_cloud_sql.py | 3 +- .../test_cloud_storage_transfer_service.py | 3 +- ...st_cloud_storage_transfer_service_async.py | 3 +- .../tests}/google/cloud/hooks/test_compute.py | 3 +- .../google/cloud/hooks/test_compute_ssh.py | 0 .../google/cloud/hooks/test_datacatalog.py | 3 +- .../google/cloud/hooks/test_dataflow.py | 0 .../google/cloud/hooks/test_dataform.py | 3 +- .../google/cloud/hooks/test_datafusion.py | 3 +- .../google/cloud/hooks/test_datapipeline.py | 0 .../google/cloud/hooks/test_dataplex.py | 3 +- .../google/cloud/hooks/test_dataprep.py | 0 .../google/cloud/hooks/test_dataproc.py | 0 .../cloud/hooks/test_dataproc_metastore.py | 3 +- .../google/cloud/hooks/test_datastore.py | 0 .../tests}/google/cloud/hooks/test_dlp.py | 3 +- .../google/cloud/hooks/test_functions.py | 3 +- .../tests}/google/cloud/hooks/test_gcs.py | 3 +- .../tests}/google/cloud/hooks/test_gdm.py | 0 .../tests}/google/cloud/hooks/test_kms.py | 0 .../google/cloud/hooks/test_kms_system.py | 5 +- .../cloud/hooks/test_kubernetes_engine.py | 3 +- .../google/cloud/hooks/test_life_sciences.py | 3 +- .../tests}/google/cloud/hooks/test_looker.py | 0 .../google/cloud/hooks/test_mlengine.py | 3 +- .../cloud/hooks/test_natural_language.py | 3 +- .../google/cloud/hooks/test_os_login.py | 3 +- .../tests}/google/cloud/hooks/test_pubsub.py | 0 .../google/cloud/hooks/test_secret_manager.py | 3 +- .../cloud/hooks/test_secret_manager_system.py | 5 +- 
.../tests}/google/cloud/hooks/test_spanner.py | 3 +- .../google/cloud/hooks/test_speech_to_text.py | 3 +- .../google/cloud/hooks/test_stackdriver.py | 0 .../tests}/google/cloud/hooks/test_tasks.py | 3 +- .../google/cloud/hooks/test_text_to_speech.py | 3 +- .../google/cloud/hooks/test_translate.py | 3 +- .../cloud/hooks/test_video_intelligence.py | 3 +- .../tests}/google/cloud/hooks/test_vision.py | 3 +- .../google/cloud/hooks/test_workflows.py | 0 .../google/cloud/hooks/vertex_ai}/__init__.py | 0 .../cloud/hooks/vertex_ai/test_auto_ml.py | 3 +- .../vertex_ai/test_batch_prediction_job.py | 3 +- .../cloud/hooks/vertex_ai/test_custom_job.py | 3 +- .../cloud/hooks/vertex_ai/test_dataset.py | 3 +- .../hooks/vertex_ai/test_endpoint_service.py | 3 +- .../hooks/vertex_ai/test_generative_model.py | 3 +- .../test_hyperparameter_tuning_job.py | 3 +- .../hooks/vertex_ai/test_model_service.py | 3 +- .../hooks/vertex_ai/test_pipeline_job.py | 3 +- .../vertex_ai/test_prediction_service.py | 3 +- .../tests/google/cloud/links}/__init__.py | 0 .../google/cloud/links/test_translate.py | 0 .../tests/google/cloud/log}/__init__.py | 0 .../google/cloud/log/test_gcs_task_handler.py | 5 +- .../cloud/log/test_gcs_task_handler_system.py | 24 +- .../log/test_stackdriver_task_handler.py | 7 +- .../test_stackdriver_task_handler_system.py | 22 +- .../google/cloud/openlineage}/__init__.py | 0 .../google/cloud/openlineage/test_mixins.py | 15 +- .../google/cloud/openlineage/test_utils.py | 4 +- .../tests/google/cloud/operators}/__init__.py | 0 .../cloud/operators/source}/__init__.py | 0 .../source/source_prefix}/__init__.py | 0 .../google/cloud/operators/test_automl.py | 0 .../google/cloud/operators/test_bigquery.py | 11 +- .../cloud/operators/test_bigquery_dts.py | 3 +- .../google/cloud/operators/test_bigtable.py | 0 .../google/cloud/operators/test_cloud_base.py | 0 .../cloud/operators/test_cloud_batch.py | 0 .../cloud/operators/test_cloud_build.py | 0 .../cloud/operators/test_cloud_composer.py | 0 .../cloud/operators/test_cloud_memorystore.py | 0 .../google/cloud/operators/test_cloud_run.py | 0 .../google/cloud/operators/test_cloud_sql.py | 0 .../test_cloud_storage_transfer_service.py | 0 .../google/cloud/operators/test_compute.py | 0 .../cloud/operators/test_datacatalog.py | 0 .../google/cloud/operators/test_dataflow.py | 0 .../google/cloud/operators/test_dataform.py | 0 .../google/cloud/operators/test_datafusion.py | 0 .../cloud/operators/test_datapipeline.py | 0 .../google/cloud/operators/test_dataplex.py | 0 .../google/cloud/operators/test_dataprep.py | 0 .../cloud/operators/test_dataprep_system.py | 5 +- .../google/cloud/operators/test_dataproc.py | 5 +- .../operators/test_dataproc_metastore.py | 0 .../google/cloud/operators/test_datastore.py | 0 .../cloud/operators/test_datastore_system.py | 8 +- .../tests}/google/cloud/operators/test_dlp.py | 0 .../google/cloud/operators/test_functions.py | 0 .../tests}/google/cloud/operators/test_gcs.py | 0 .../cloud/operators/test_kubernetes_engine.py | 0 .../cloud/operators/test_life_sciences.py | 0 .../google/cloud/operators/test_looker.py | 3 +- .../google/cloud/operators/test_mlengine.py | 0 .../cloud/operators/test_natural_language.py | 0 .../google/cloud/operators/test_pubsub.py | 0 .../google/cloud/operators/test_spanner.py | 0 .../cloud/operators/test_speech_to_text.py | 0 .../cloud/operators/test_stackdriver.py | 0 .../google/cloud/operators/test_tasks.py | 0 .../cloud/operators/test_text_to_speech.py | 0 .../google/cloud/operators/test_translate.py | 0 
.../cloud/operators/test_translate_speech.py | 0 .../google/cloud/operators/test_vertex_ai.py | 18 +- .../operators/test_video_intelligence.py | 0 .../google/cloud/operators/test_vision.py | 0 .../google/cloud/operators/test_workflows.py | 0 .../cloud/operators/vertex_ai}/__init__.py | 0 .../vertex_ai/test_generative_model.py | 0 .../tests/google/cloud/secrets}/__init__.py | 0 .../cloud/secrets/test_secret_manager.py | 0 .../secrets/test_secret_manager_system.py | 4 +- .../tests/google/cloud/sensors}/__init__.py | 0 .../google/cloud/sensors/test_bigquery.py | 0 .../google/cloud/sensors/test_bigquery_dts.py | 0 .../google/cloud/sensors/test_bigtable.py | 0 .../cloud/sensors/test_cloud_composer.py | 0 .../test_cloud_storage_transfer_service.py | 0 .../google/cloud/sensors/test_dataflow.py | 0 .../google/cloud/sensors/test_datafusion.py | 0 .../google/cloud/sensors/test_dataplex.py | 0 .../google/cloud/sensors/test_dataprep.py | 0 .../google/cloud/sensors/test_dataproc.py | 0 .../cloud/sensors/test_dataproc_metastore.py | 0 .../tests}/google/cloud/sensors/test_gcs.py | 0 .../google/cloud/sensors/test_looker.py | 0 .../google/cloud/sensors/test_pubsub.py | 0 .../tests}/google/cloud/sensors/test_tasks.py | 0 .../google/cloud/sensors/test_workflows.py | 0 .../tests/google/cloud/transfers}/__init__.py | 0 .../cloud/transfers/test_adls_to_gcs.py | 0 .../cloud/transfers/test_azure_blob_to_gcs.py | 0 .../transfers/test_azure_fileshare_to_gcs.py | 0 .../transfers/test_bigquery_to_bigquery.py | 0 .../cloud/transfers/test_bigquery_to_gcs.py | 0 .../cloud/transfers/test_bigquery_to_mssql.py | 0 .../cloud/transfers/test_bigquery_to_mysql.py | 0 .../transfers/test_bigquery_to_postgres.py | 0 .../cloud/transfers/test_calendar_to_gcs.py | 0 .../cloud/transfers/test_cassandra_to_gcs.py | 18 +- .../transfers/test_facebook_ads_to_gcs.py | 0 .../test_facebook_ads_to_gcs_system.py | 9 +- .../cloud/transfers/test_gcs_to_bigquery.py | 0 .../google/cloud/transfers/test_gcs_to_gcs.py | 0 .../cloud/transfers/test_gcs_to_local.py | 0 .../cloud/transfers/test_gcs_to_sftp.py | 0 .../cloud/transfers/test_gdrive_to_gcs.py | 0 .../cloud/transfers/test_gdrive_to_local.py | 0 .../cloud/transfers/test_local_to_gcs.py | 0 .../cloud/transfers/test_mssql_to_gcs.py | 0 .../cloud/transfers/test_mysql_to_gcs.py | 0 .../cloud/transfers/test_oracle_to_gcs.py | 0 .../cloud/transfers/test_postgres_to_gcs.py | 0 .../google/cloud/transfers/test_s3_to_gcs.py | 0 .../cloud/transfers/test_salesforce_to_gcs.py | 0 .../test_salesforce_to_gcs_system.py | 10 +- .../cloud/transfers/test_sftp_to_gcs.py | 0 .../cloud/transfers/test_sheets_to_gcs.py | 0 .../google/cloud/transfers/test_sql_to_gcs.py | 0 .../cloud/transfers/test_trino_to_gcs.py | 0 .../tests/google/cloud/triggers}/__init__.py | 0 .../google/cloud/triggers/test_bigquery.py | 0 .../cloud/triggers/test_bigquery_dts.py | 0 .../google/cloud/triggers/test_cloud_batch.py | 0 .../google/cloud/triggers/test_cloud_build.py | 0 .../cloud/triggers/test_cloud_composer.py | 0 .../google/cloud/triggers/test_cloud_run.py | 0 .../google/cloud/triggers/test_cloud_sql.py | 0 .../test_cloud_storage_transfer_service.py | 0 .../google/cloud/triggers/test_dataflow.py | 0 .../google/cloud/triggers/test_datafusion.py | 0 .../google/cloud/triggers/test_dataplex.py | 0 .../google/cloud/triggers/test_dataproc.py | 0 .../tests}/google/cloud/triggers/test_gcs.py | 0 .../cloud/triggers/test_kubernetes_engine.py | 0 .../google/cloud/triggers/test_mlengine.py | 0 .../google/cloud/triggers/test_pubsub.py | 0 
.../google/cloud/triggers/test_vertex_ai.py | 3 +- .../tests/google/cloud/utils}/__init__.py | 0 .../tests}/google/cloud/utils/airflow_util.py | 0 .../google/cloud/utils/base_gcp_mock.py | 0 .../google/cloud/utils/gcp_authenticator.py | 5 +- .../google/cloud/utils/job_details.json | 0 .../google/cloud/utils/out_table_details.json | 0 .../cloud/utils/script_job_details.json | 0 .../google/cloud/utils/table_details.json | 0 .../cloud/utils/test_credentials_provider.py | 12 +- .../google/cloud/utils/test_datafusion.py | 0 .../google/cloud/utils/test_dataproc.py | 0 .../utils/test_external_token_supplier.py | 0 .../cloud/utils/test_field_sanitizer.py | 0 .../cloud/utils/test_field_validator.py | 0 .../tests}/google/cloud/utils/test_helpers.py | 0 .../utils/test_mlengine_operator_utils.py | 0 .../utils/test_mlengine_prediction_summary.py | 12 +- .../tests/google/common}/__init__.py | 0 .../google/common/auth_backend}/__init__.py | 0 .../common/auth_backend/test_google_openid.py | 9 +- .../tests/google/common/hooks}/__init__.py | 0 .../google/common/hooks/test_base_google.py | 3 +- .../google/common/hooks/test_discovery_api.py | 0 .../tests}/google/common/test_deprecated.py | 0 .../tests/google/common/utils}/__init__.py | 0 .../common/utils/test_id_token_credentials.py | 0 .../tests/google/firebase}/__init__.py | 0 .../tests/google/firebase/hooks}/__init__.py | 0 .../google/firebase/hooks/test_firestore.py | 3 +- .../google/firebase/operators}/__init__.py | 0 .../firebase/operators/test_firestore.py | 0 .../tests/google/leveldb}/__init__.py | 0 .../tests/google/leveldb/hooks}/__init__.py | 0 .../google/leveldb/hooks/test_leveldb.py | 0 .../google/leveldb/operators}/__init__.py | 0 .../google/leveldb/operators/test_leveldb.py | 0 .../google/marketing_platform}/__init__.py | 0 .../marketing_platform/hooks}/__init__.py | 0 .../hooks/test_analytics.py | 14 +- .../hooks/test_analytics_admin.py | 3 +- .../hooks/test_campaign_manager.py | 3 +- .../hooks/test_display_video.py | 3 +- .../hooks/test_search_ads.py | 3 +- .../marketing_platform/links}/__init__.py | 0 .../links/test_analytics_admin.py | 0 .../marketing_platform/operators}/__init__.py | 0 .../operators/test_analytics.py | 0 .../operators/test_analytics_admin.py | 0 .../operators/test_campaign_manager.py | 0 .../operators/test_display_video.py | 0 .../operators/test_display_video_system.py | 9 +- .../operators/test_search_ads.py | 0 .../marketing_platform/sensors}/__init__.py | 0 .../sensors/test_campaign_manager.py | 0 .../sensors/test_display_video.py | 0 .../tests/google/suite}/__init__.py | 0 .../tests/google/suite/hooks}/__init__.py | 0 .../google/suite/hooks/test_calendar.py | 3 +- .../tests}/google/suite/hooks/test_drive.py | 3 +- .../tests}/google/suite/hooks/test_sheets.py | 3 +- .../tests/google/suite/operators}/__init__.py | 0 .../google/suite/operators/test_sheets.py | 0 .../tests/google/suite/sensors}/__init__.py | 0 .../tests}/google/suite/sensors/test_drive.py | 0 .../tests/google/suite/transfers}/__init__.py | 0 .../suite/transfers/test_gcs_to_gdrive.py | 0 .../suite/transfers/test_gcs_to_sheets.py | 0 .../suite/transfers/test_local_to_drive.py | 0 .../suite/transfers/test_sql_to_sheets.py | 0 .../tests}/google/test_go_module.py | 0 .../tests/grpc}/__init__.py | 0 .../tests/grpc/hooks}/__init__.py | 0 .../tests}/grpc/hooks/test_grpc.py | 0 .../tests/grpc/operators}/__init__.py | 0 .../tests}/grpc/operators/test_grpc.py | 0 .../tests/hashicorp}/__init__.py | 0 .../hashicorp/_internal_client}/__init__.py | 0 
.../_internal_client/test_vault_client.py | 0 .../tests/hashicorp/hooks}/__init__.py | 0 .../tests}/hashicorp/hooks/test_vault.py | 3 +- .../tests/hashicorp/secrets}/__init__.py | 0 .../tests}/hashicorp/secrets/test_vault.py | 0 .../tests/http}/__init__.py | 0 .../tests/http/hooks}/__init__.py | 0 .../tests}/http/hooks/test_http.py | 53 +- .../tests/http/operators}/__init__.py | 0 .../tests}/http/operators/test_http.py | 0 .../tests/http/sensors}/__init__.py | 0 .../tests}/http/sensors/test_http.py | 0 .../tests/http/triggers}/__init__.py | 0 .../tests}/http/triggers/test_http.py | 0 .../tests/imap}/__init__.py | 0 .../tests/imap/hooks}/__init__.py | 0 .../tests}/imap/hooks/test_imap.py | 3 +- .../tests/imap/sensors}/__init__.py | 0 .../imap/sensors/test_imap_attachment.py | 0 .../tests/influxdb}/__init__.py | 0 .../tests/influxdb/hooks}/__init__.py | 0 .../tests}/influxdb/hooks/test_influxdb.py | 0 .../tests/influxdb/operators}/__init__.py | 0 .../influxdb/operators/test_influxdb.py | 0 .../tests/integration}/__init__.py | 0 .../tests/integration/apache}/__init__.py | 0 .../integration/apache/cassandra}/__init__.py | 0 .../apache/cassandra/hooks}/__init__.py | 0 .../apache/cassandra/hooks/test_cassandra.py | 0 .../integration/apache/drill}/__init__.py | 0 .../apache/drill/hooks}/__init__.py | 0 .../apache/drill/hooks/test_drill.py | 0 .../apache/drill/operators}/__init__.py | 0 .../integration/apache/hive}/__init__.py | 0 .../apache/hive/transfers}/__init__.py | 0 .../hive/transfers/test_mssql_to_hive.py | 0 .../integration/apache/kafka}/__init__.py | 0 .../apache/kafka/hooks}/__init__.py | 0 .../apache/kafka/hooks/test_admin_client.py | 0 .../apache/kafka/hooks/test_consumer.py | 0 .../apache/kafka/hooks/test_producer.py | 0 .../apache/kafka/operators}/__init__.py | 0 .../apache/kafka/operators/test_consume.py | 0 .../apache/kafka/operators/test_produce.py | 0 .../apache/kafka/sensors}/__init__.py | 0 .../apache/kafka/triggers}/__init__.py | 0 .../kafka/triggers/test_await_message.py | 0 .../integration/apache/pinot}/__init__.py | 0 .../apache/pinot/hooks}/__init__.py | 0 .../apache/pinot/hooks/test_pinot.py | 0 .../tests/integration/google}/__init__.py | 0 .../integration/google/cloud}/__init__.py | 0 .../google/cloud/transfers}/__init__.py | 0 .../cloud/transfers/test_bigquery_to_mssql.py | 0 .../cloud/transfers/test_mssql_to_gcs.py | 0 .../cloud/transfers/test_trino_to_gcs.py | 0 .../tests/integration/microsoft}/__init__.py | 0 .../integration/microsoft/mssql}/__init__.py | 0 .../microsoft/mssql/hooks}/__init__.py | 0 .../microsoft/mssql/hooks/test_mssql.py | 0 .../tests/integration/mongo}/__init__.py | 0 .../integration/mongo/sensors}/__init__.py | 0 .../integration}/mongo/sensors/test_mongo.py | 0 .../integration/openlineage}/__init__.py | 0 .../openlineage/operators}/__init__.py | 0 .../tests/integration/qdrant}/__init__.py | 0 .../integration/qdrant/hooks}/__init__.py | 0 .../integration}/qdrant/hooks/test_qdrant.py | 0 .../integration/qdrant/operators}/__init__.py | 0 .../qdrant/operators/test_qdrant_ingest.py | 0 .../tests/integration/redis}/__init__.py | 0 .../integration/redis/hooks}/__init__.py | 0 .../integration}/redis/hooks/test_redis.py | 0 .../integration/redis/operators}/__init__.py | 0 .../redis/operators/test_redis_publish.py | 0 .../integration/redis/sensors}/__init__.py | 0 .../redis/sensors/test_redis_key.py | 0 .../redis/sensors/test_redis_pub_sub.py | 0 .../tests/integration/trino}/__init__.py | 0 .../integration/trino/hooks}/__init__.py | 0 
.../integration}/trino/hooks/test_trino.py | 0 .../tests/integration/ydb}/__init__.py | 0 .../tests/integration/ydb/hooks}/__init__.py | 0 .../integration/ydb/operators}/__init__.py | 0 .../integration}/ydb/operators/test_ydb.py | 0 .../tests/jdbc}/__init__.py | 0 .../tests/jdbc/hooks}/__init__.py | 0 .../tests}/jdbc/hooks/test_jdbc.py | 7 +- .../tests/jdbc/operators}/__init__.py | 0 .../tests}/jdbc/operators/test_jdbc.py | 0 .../tests/jenkins}/__init__.py | 0 .../tests/jenkins/hooks}/__init__.py | 0 .../tests}/jenkins/hooks/test_jenkins.py | 0 .../tests/jenkins}/operators/__init__.py | 0 .../operators/test_jenkins_job_trigger.py | 85 +- .../tests/jenkins/sensors}/__init__.py | 0 .../tests}/jenkins/sensors/test_jenkins.py | 0 .../tests/microsoft}/__init__.py | 0 .../tests/microsoft/azure}/__init__.py | 0 .../tests}/microsoft/azure/base.py | 10 +- .../tests/microsoft/azure/fs}/__init__.py | 0 .../tests}/microsoft/azure/fs/test_adls.py | 0 .../tests/microsoft/azure/hooks}/__init__.py | 0 .../tests}/microsoft/azure/hooks/test_adx.py | 0 .../tests}/microsoft/azure/hooks/test_asb.py | 0 .../microsoft/azure/hooks/test_base_azure.py | 0 .../microsoft/azure/hooks/test_batch.py | 0 .../azure/hooks/test_container_instance.py | 11 +- .../azure/hooks/test_container_registry.py | 0 .../azure/hooks/test_container_volume.py | 0 .../microsoft/azure/hooks/test_cosmos.py | 0 .../azure/hooks/test_data_factory.py | 0 .../microsoft/azure/hooks/test_data_lake.py | 0 .../microsoft/azure/hooks/test_fileshare.py | 0 .../microsoft/azure/hooks/test_msgraph.py | 3 +- .../microsoft/azure/hooks/test_powerbi.py | 0 .../microsoft/azure/hooks/test_synapse.py | 0 .../azure/hooks/test_synapse_pipeline.py | 0 .../tests}/microsoft/azure/hooks/test_wasb.py | 0 .../tests/microsoft/azure/log}/__init__.py | 0 .../azure/log/test_wasb_task_handler.py | 5 +- .../microsoft/azure/operators}/__init__.py | 0 .../azure/operators/test_adls_create.py | 0 .../azure/operators/test_adls_delete.py | 0 .../azure/operators/test_adls_list.py | 0 .../microsoft/azure/operators/test_adx.py | 0 .../microsoft/azure/operators/test_asb.py | 0 .../microsoft/azure/operators/test_batch.py | 0 .../operators/test_container_instances.py | 0 .../microsoft/azure/operators/test_cosmos.py | 0 .../azure/operators/test_data_factory.py | 0 .../microsoft/azure/operators/test_msgraph.py | 5 +- .../microsoft/azure/operators/test_powerbi.py | 5 +- .../microsoft/azure/operators/test_synapse.py | 0 .../azure/operators/test_wasb_delete_blob.py | 0 .../microsoft/azure/resources}/__init__.py | 0 .../microsoft/azure/resources/dummy.pdf | Bin .../microsoft/azure/resources/next_users.json | 0 .../microsoft/azure/resources/status.json | 0 .../microsoft/azure/resources/users.json | 0 .../microsoft/azure/secrets}/__init__.py | 0 .../microsoft/azure/secrets/test_key_vault.py | 0 .../microsoft/azure/sensors}/__init__.py | 0 .../microsoft/azure/sensors/test_cosmos.py | 0 .../azure/sensors/test_data_factory.py | 0 .../microsoft/azure/sensors/test_msgraph.py | 5 +- .../microsoft/azure/sensors/test_wasb.py | 0 .../tests}/microsoft/azure/test_utils.py | 0 .../microsoft/azure/transfers}/__init__.py | 0 .../azure/transfers/test_local_to_adls.py | 0 .../azure/transfers/test_local_to_wasb.py | 0 .../test_oracle_to_azure_data_lake.py | 0 .../azure/transfers/test_s3_to_wasb.py | 0 .../azure/transfers/test_sftp_to_wasb.py | 0 .../microsoft/azure/triggers}/__init__.py | 0 .../azure/triggers/test_data_factory.py | 0 .../microsoft/azure/triggers/test_msgraph.py | 5 +- 
.../microsoft/azure/triggers/test_powerbi.py | 3 +- .../microsoft/azure/triggers/test_wasb.py | 0 .../tests}/microsoft/conftest.py | 0 .../tests/microsoft/mssql}/__init__.py | 0 .../tests/microsoft/mssql/hooks}/__init__.py | 0 .../microsoft/mssql/hooks/test_mssql.py | 3 +- .../microsoft/mssql/operators}/__init__.py | 0 .../microsoft/mssql/operators/test_mssql.py | 0 .../microsoft/mssql/resources}/__init__.py | 0 .../microsoft/mssql/resources/replace.sql | 0 .../tests/microsoft/psrp}/__init__.py | 0 .../tests/microsoft/psrp/hooks}/__init__.py | 0 .../tests}/microsoft/psrp/hooks/test_psrp.py | 17 +- .../microsoft/psrp/operators}/__init__.py | 0 .../microsoft/psrp/operators/test_psrp.py | 0 .../tests/microsoft/winrm}/__init__.py | 0 .../tests/microsoft/winrm/hooks}/__init__.py | 0 .../microsoft/winrm/hooks/test_winrm.py | 0 .../microsoft/winrm/operators}/__init__.py | 0 .../microsoft/winrm/operators/test_winrm.py | 0 .../tests/mongo}/__init__.py | 0 .../tests/mongo/hooks}/__init__.py | 0 .../tests}/mongo/hooks/test_mongo.py | 3 +- .../tests/mongo/sensors}/__init__.py | 0 .../tests}/mongo/sensors/test_mongo.py | 0 .../tests/mysql}/__init__.py | 0 .../tests/mysql/assets}/__init__.py | 0 .../tests}/mysql/assets/test_mysql.py | 0 .../tests/mysql/hooks}/__init__.py | 0 .../tests}/mysql/hooks/test_mysql.py | 3 +- .../hooks/test_mysql_connector_python.py | 0 .../tests/mysql/operators}/__init__.py | 0 .../tests}/mysql/operators/test_mysql.py | 3 +- .../tests/mysql/transfers}/__init__.py | 0 .../mysql/transfers/test_presto_to_mysql.py | 0 .../mysql/transfers/test_s3_to_mysql.py | 0 .../mysql/transfers/test_trino_to_mysql.py | 0 .../mysql/transfers/test_vertica_to_mysql.py | 0 .../tests/neo4j}/__init__.py | 0 .../tests/neo4j/hooks}/__init__.py | 0 .../tests}/neo4j/hooks/test_neo4j.py | 0 .../tests/neo4j/operators}/__init__.py | 0 .../tests}/neo4j/operators/test_neo4j.py | 0 .../tests/odbc}/__init__.py | 0 .../tests/odbc/hooks}/__init__.py | 0 .../tests}/odbc/hooks/test_odbc.py | 3 +- .../tests/openai}/__init__.py | 0 .../tests/openai/hooks}/__init__.py | 0 .../tests}/openai/hooks/test_openai.py | 0 .../tests/openai/operators}/__init__.py | 0 .../tests}/openai/operators/test_openai.py | 0 .../tests}/openai/test_exceptions.py | 0 .../tests/openai/triggers}/__init__.py | 0 .../tests}/openai/triggers/test_openai.py | 0 .../tests/openfaas}/__init__.py | 0 .../tests/openfaas/hooks}/__init__.py | 0 .../tests}/openfaas/hooks/test_openfaas.py | 0 .../tests/openlineage}/__init__.py | 0 .../tests/openlineage/extractors}/__init__.py | 0 .../openlineage/extractors/test_base.py | 2 +- .../openlineage/extractors/test_bash.py | 3 +- .../openlineage/extractors/test_manager.py | 3 +- .../openlineage/extractors/test_python.py | 3 +- .../tests}/openlineage/log_config.py | 0 .../tests/openlineage/plugins}/__init__.py | 0 .../plugins/openlineage_configs}/__init__.py | 0 .../plugins/openlineage_configs/http.yaml | 0 .../openlineage/plugins/test_adapter.py | 5 +- .../openlineage/plugins/test_execution.py | 5 +- .../tests}/openlineage/plugins/test_facets.py | 0 .../openlineage/plugins/test_listener.py | 5 +- .../tests}/openlineage/plugins/test_macros.py | 0 .../openlineage/plugins/test_openlineage.py | 4 +- .../tests}/openlineage/plugins/test_utils.py | 3 +- .../tests}/openlineage/test_conf.py | 3 +- .../tests}/openlineage/test_sqlparser.py | 0 .../tests/openlineage/utils}/__init__.py | 0 .../openlineage/utils/custom_facet_fixture.py | 0 .../utils/test_selective_enable.py | 0 .../tests}/openlineage/utils/test_sql.py | 0 
.../tests}/openlineage/utils/test_utils.py | 31 +- .../tests/opensearch}/__init__.py | 0 .../tests}/opensearch/conftest.py | 0 .../tests/opensearch/hooks}/__init__.py | 0 .../opensearch/hooks/test_opensearch.py | 0 .../tests/opensearch/log}/__init__.py | 0 .../opensearch/log/test_os_json_formatter.py | 0 .../tests}/opensearch/log/test_os_response.py | 0 .../opensearch/log/test_os_task_handler.py | 7 +- .../tests/opensearch/operators}/__init__.py | 0 .../opensearch/operators/test_opensearch.py | 0 .../tests/opsgenie}/__init__.py | 0 .../tests/opsgenie}/hooks/__init__.py | 0 .../tests}/opsgenie/hooks/test_opsgenie.py | 0 .../tests/opsgenie/notifications}/__init__.py | 0 .../opsgenie/notifications/test_opsgenie.py | 0 .../tests/opsgenie/operators}/__init__.py | 0 .../opsgenie/operators/test_opsgenie.py | 0 .../tests/opsgenie/typing}/__init__.py | 0 .../tests}/opsgenie/typing/test_opsgenie.py | 0 .../tests/oracle}/__init__.py | 0 .../tests/oracle/hooks}/__init__.py | 0 .../tests}/oracle/hooks/test_oracle.py | 0 .../tests/oracle/operators}/__init__.py | 0 .../tests}/oracle/operators/test_oracle.py | 0 .../tests/oracle/transfers}/__init__.py | 0 .../oracle/transfers/test_oracle_to_oracle.py | 0 .../tests/pagerduty}/__init__.py | 0 .../tests/pagerduty/hooks}/__init__.py | 0 .../tests}/pagerduty/hooks/test_pagerduty.py | 0 .../pagerduty/hooks/test_pagerduty_events.py | 0 .../pagerduty/notifications}/__init__.py | 0 .../pagerduty/notifications/test_pagerduty.py | 0 .../tests/papermill}/__init__.py | 0 .../tests/papermill/hooks}/__init__.py | 0 .../tests}/papermill/hooks/test_kernel.py | 0 .../tests/papermill/operators}/__init__.py | 0 .../papermill/operators/test_papermill.py | 0 .../tests/pgvector}/__init__.py | 0 .../tests/pgvector/hooks}/__init__.py | 0 .../tests}/pgvector/hooks/test_pgvector.py | 0 .../tests/pgvector/operators}/__init__.py | 0 .../pgvector/operators/test_pgvector.py | 0 .../tests/pinecone}/__init__.py | 0 .../tests/pinecone/hooks}/__init__.py | 0 .../tests}/pinecone/hooks/test_pinecone.py | 0 .../tests/pinecone/operators}/__init__.py | 0 .../pinecone/operators/test_pinecone.py | 0 .../tests/postgres}/__init__.py | 0 .../tests/postgres/assets}/__init__.py | 0 .../tests}/postgres/assets/test_postgres.py | 0 .../tests/postgres/hooks}/__init__.py | 0 .../tests}/postgres/hooks/test_postgres.py | 0 .../tests/postgres/operators}/__init__.py | 0 .../postgres/operators/test_postgres.py | 0 .../tests/presto}/__init__.py | 0 .../tests/presto/hooks}/__init__.py | 0 .../tests}/presto/hooks/test_presto.py | 0 .../tests/presto/transfers}/__init__.py | 0 .../presto/transfers/test_gcs_to_presto.py | 0 .../tests/qdrant}/__init__.py | 0 .../tests/qdrant/hooks}/__init__.py | 0 .../tests}/qdrant/hooks/test_qdrant.py | 0 .../tests/qdrant}/operators/__init__.py | 0 .../tests}/qdrant/operators/test_qdrant.py | 0 .../tests/redis}/__init__.py | 0 .../tests/redis/hooks}/__init__.py | 0 .../tests}/redis/hooks/test_redis.py | 0 .../tests/redis/log}/__init__.py | 0 .../redis/log/test_redis_task_handler.py | 3 +- .../tests/redis/operators}/__init__.py | 0 .../redis/operators/test_redis_publish.py | 0 .../tests/redis/sensors}/__init__.py | 0 .../tests}/redis/sensors/test_redis_key.py | 0 .../redis/sensors/test_redis_pub_sub.py | 0 .../tests/salesforce}/__init__.py | 0 .../tests/salesforce/hooks}/__init__.py | 0 .../salesforce/hooks/test_salesforce.py | 0 .../tests/salesforce/operators}/__init__.py | 0 .../tests}/salesforce/operators/test_bulk.py | 0 .../operators/test_salesforce_apex_rest.py | 0 
.../tests/samba}/__init__.py | 0 .../tests/samba}/hooks/__init__.py | 0 .../tests}/samba/hooks/test_samba.py | 0 .../tests/samba/transfers}/__init__.py | 0 .../samba/transfers/test_gcs_to_samba.py | 0 .../tests/segment}/__init__.py | 0 .../tests/segment/hooks}/__init__.py | 0 .../tests}/segment/hooks/test_segment.py | 0 .../tests/segment/operators}/__init__.py | 0 .../operators/test_segment_track_event.py | 0 .../tests/sendgrid}/__init__.py | 0 .../tests/sendgrid/utils}/__init__.py | 0 .../tests}/sendgrid/utils/test_emailer.py | 0 .../tests/sftp}/__init__.py | 0 .../tests/sftp/decorators}/__init__.py | 0 .../sftp/decorators/sensors}/__init__.py | 0 .../sftp/decorators/sensors/test_sftp.py | 0 .../tests/sftp/hooks}/__init__.py | 0 .../tests}/sftp/hooks/test_sftp.py | 5 +- .../tests/sftp/operators}/__init__.py | 0 .../tests}/sftp/operators/test_sftp.py | 20 +- .../tests/sftp/sensors}/__init__.py | 0 .../tests}/sftp/sensors/test_sftp.py | 0 .../tests/sftp/triggers}/__init__.py | 0 .../tests}/sftp/triggers/test_sftp.py | 0 .../tests/singularity}/__init__.py | 0 .../tests/singularity}/operators/__init__.py | 0 .../singularity/operators/test_singularity.py | 0 .../tests/slack}/__init__.py | 0 .../tests/slack/hooks}/__init__.py | 0 .../tests}/slack/hooks/test_slack.py | 0 .../tests}/slack/hooks/test_slack_webhook.py | 0 .../tests/slack/notifications}/__init__.py | 0 .../tests}/slack/notifications/test_slack.py | 0 .../slack/notifications/test_slack_webhook.py | 0 .../tests/slack/operators}/__init__.py | 0 .../tests}/slack/operators/test_slack.py | 5 +- .../slack/operators/test_slack_webhook.py | 0 .../tests/slack/transfers}/__init__.py | 0 .../tests}/slack/transfers/conftest.py | 0 .../slack/transfers/test_base_sql_to_slack.py | 0 .../slack/transfers/test_sql_to_slack.py | 0 .../transfers/test_sql_to_slack_webhook.py | 10 +- .../tests/slack/utils}/__init__.py | 0 .../tests}/slack/utils/test_utils.py | 0 .../tests/smtp}/__init__.py | 0 .../tests/smtp/hooks}/__init__.py | 0 .../tests}/smtp/hooks/test_smtp.py | 3 +- .../tests/smtp/notifications}/__init__.py | 0 .../tests}/smtp/notifications/test_smtp.py | 12 +- .../tests/smtp/operators}/__init__.py | 0 .../tests}/smtp/operators/test_smtp.py | 0 .../tests/snowflake}/__init__.py | 0 .../tests/snowflake/decorators}/__init__.py | 0 .../snowflake/decorators/test_snowpark.py | 0 .../tests/snowflake/hooks}/__init__.py | 0 .../tests}/snowflake/hooks/test_snowflake.py | 53 +- .../snowflake/hooks/test_snowflake_sql_api.py | 24 +- .../tests}/snowflake/hooks/test_sql.py | 0 .../tests/snowflake/operators}/__init__.py | 0 .../snowflake/operators/test_snowflake.py | 0 .../snowflake/operators/test_snowflake_sql.py | 0 .../snowflake/operators/test_snowpark.py | 0 .../tests/snowflake/transfers}/__init__.py | 0 .../transfers/test_copy_into_snowflake.py | 0 .../tests/snowflake/triggers}/__init__.py | 0 .../snowflake/triggers/test_snowflake.py | 0 .../tests/snowflake/utils}/__init__.py | 0 .../tests}/snowflake/utils/test_common.py | 0 .../snowflake/utils/test_openlineage.py | 0 .../tests}/snowflake/utils/test_snowpark.py | 0 .../utils/test_sql_api_generate_jwt.py | 0 .../tests/sqlite}/__init__.py | 0 .../tests/sqlite/hooks}/__init__.py | 0 .../tests}/sqlite/hooks/test_sqlite.py | 0 .../tests/sqlite/operators}/__init__.py | 0 .../tests}/sqlite/operators/test_sqlite.py | 0 .../hooks => providers/tests/ssh}/__init__.py | 0 .../tests/ssh/hooks}/__init__.py | 0 .../tests}/ssh/hooks/test_ssh.py | 5 +- .../tests/ssh/operators}/__init__.py | 0 
.../tests}/ssh/operators/test_ssh.py | 3 +- .../tests/standard}/__init__.py | 0 .../tests/standard/operators}/__init__.py | 0 .../tests}/standard/operators/test_bash.py | 3 +- .../standard/operators/test_datetime.py | 3 +- .../tests}/standard/operators/test_weekday.py | 3 +- .../tests/standard/sensors}/__init__.py | 0 .../tests}/standard/sensors/test_bash.py | 0 .../tests}/standard/sensors/test_date_time.py | 0 .../tests}/standard/sensors/test_time.py | 0 .../standard/sensors/test_time_delta.py | 0 .../tests}/standard/sensors/test_weekday.py | 3 +- .../tests/system}/__init__.py | 0 .../tests/system/airbyte}/__init__.py | 0 .../airbyte/example_airbyte_trigger_job.py | 2 +- .../tests/system/alibaba}/__init__.py | 0 .../alibaba/example_adb_spark_batch.py | 4 +- .../system}/alibaba/example_adb_spark_sql.py | 4 +- .../system}/alibaba/example_oss_bucket.py | 4 +- .../system}/alibaba/example_oss_object.py | 4 +- .../tests/system}/amazon/CONTRIBUTING.md | 2 +- .../tests/system}/amazon/README.md | 0 .../tests/system/amazon}/__init__.py | 0 .../tests/system/amazon/aws}/__init__.py | 0 .../system}/amazon/aws/example_appflow.py | 7 +- .../system}/amazon/aws/example_appflow_run.py | 7 +- .../system}/amazon/aws/example_athena.py | 7 +- .../amazon/aws/example_azure_blob_to_s3.py | 7 +- .../tests/system}/amazon/aws/example_batch.py | 7 +- .../system}/amazon/aws/example_bedrock.py | 7 +- .../example_bedrock_retrieve_and_generate.py | 7 +- .../amazon/aws/example_cloudformation.py | 7 +- .../system}/amazon/aws/example_comprehend.py | 7 +- .../example_comprehend_document_classifier.py | 7 +- .../system}/amazon/aws/example_datasync.py | 7 +- .../tests/system}/amazon/aws/example_dms.py | 9 +- .../system}/amazon/aws/example_dynamodb.py | 7 +- .../amazon/aws/example_dynamodb_to_s3.py | 7 +- .../tests/system}/amazon/aws/example_ec2.py | 7 +- .../tests/system}/amazon/aws/example_ecs.py | 7 +- .../system}/amazon/aws/example_ecs_fargate.py | 7 +- .../amazon/aws/example_eks_templated.py | 7 +- .../example_eks_with_fargate_in_one_step.py | 9 +- .../aws/example_eks_with_fargate_profile.py | 9 +- .../example_eks_with_nodegroup_in_one_step.py | 9 +- .../amazon/aws/example_eks_with_nodegroups.py | 9 +- .../tests/system}/amazon/aws/example_emr.py | 7 +- .../system}/amazon/aws/example_emr_eks.py | 7 +- .../aws/example_emr_notebook_execution.py | 7 +- .../amazon/aws/example_emr_serverless.py | 7 +- .../system}/amazon/aws/example_eventbridge.py | 5 +- .../system}/amazon/aws/example_ftp_to_s3.py | 7 +- .../system}/amazon/aws/example_gcs_to_s3.py | 7 +- .../amazon/aws/example_glacier_to_gcs.py | 7 +- .../tests/system}/amazon/aws/example_glue.py | 7 +- .../amazon/aws/example_glue_data_quality.py | 7 +- ...e_glue_data_quality_with_recommendation.py | 7 +- .../amazon/aws/example_glue_databrew.py | 7 +- .../aws/example_google_api_sheets_to_s3.py | 7 +- .../aws/example_google_api_youtube_to_s3.py | 7 +- .../amazon/aws/example_hive_to_dynamodb.py | 7 +- .../system}/amazon/aws/example_http_to_s3.py | 7 +- .../aws/example_imap_attachment_to_s3.py | 7 +- .../amazon/aws/example_kinesis_analytics.py | 7 +- .../system}/amazon/aws/example_lambda.py | 7 +- .../system}/amazon/aws/example_local_to_s3.py | 7 +- .../system}/amazon/aws/example_mongo_to_s3.py | 7 +- .../system}/amazon/aws/example_neptune.py | 7 +- .../system}/amazon/aws/example_quicksight.py | 7 +- .../system}/amazon/aws/example_rds_event.py | 7 +- .../system}/amazon/aws/example_rds_export.py | 7 +- .../amazon/aws/example_rds_instance.py | 7 +- 
.../amazon/aws/example_rds_snapshot.py | 7 +- .../system}/amazon/aws/example_redshift.py | 7 +- .../aws/example_redshift_s3_transfers.py | 7 +- .../tests/system}/amazon/aws/example_s3.py | 7 +- .../amazon/aws/example_s3_to_dynamodb.py | 7 +- .../system}/amazon/aws/example_s3_to_ftp.py | 7 +- .../system}/amazon/aws/example_s3_to_sftp.py | 7 +- .../system}/amazon/aws/example_s3_to_sql.py | 7 +- .../system}/amazon/aws/example_sagemaker.py | 7 +- .../amazon/aws/example_sagemaker_endpoint.py | 9 +- .../amazon/aws/example_sagemaker_notebook.py | 7 +- .../amazon/aws/example_sagemaker_pipeline.py | 9 +- .../amazon/aws/example_salesforce_to_s3.py | 7 +- .../system}/amazon/aws/example_sftp_to_s3.py | 7 +- .../tests/system}/amazon/aws/example_sns.py | 7 +- .../system}/amazon/aws/example_sql_to_s3.py | 7 +- .../tests/system}/amazon/aws/example_sqs.py | 7 +- .../amazon/aws/example_step_functions.py | 7 +- .../system/amazon/aws/tests}/__init__.py | 0 .../amazon/aws/tests/test_aws_auth_manager.py | 20 +- .../system}/amazon/aws/utils/__init__.py | 2 +- .../tests/system}/amazon/aws/utils/ec2.py | 0 .../tests/system}/amazon/aws/utils/k8s.py | 0 .../tests/system/apache}/__init__.py | 0 .../tests/system/apache/beam}/__init__.py | 0 .../tests/system}/apache/beam/example_beam.py | 5 +- .../apache/beam/example_beam_java_flink.py | 5 +- .../apache/beam/example_beam_java_spark.py | 5 +- .../tests/system}/apache/beam/example_go.py | 5 +- .../apache/beam/example_go_dataflow.py | 5 +- .../apache/beam/example_java_dataflow.py | 5 +- .../system}/apache/beam/example_python.py | 5 +- .../apache/beam/example_python_async.py | 5 +- .../apache/beam/example_python_dataflow.py | 5 +- .../tests/system}/apache/beam/utils.py | 0 .../system/apache/cassandra}/__init__.py | 0 .../apache/cassandra/example_cassandra_dag.py | 2 +- .../tests/system/apache/drill}/__init__.py | 0 .../system}/apache/drill/example_drill_dag.py | 2 +- .../tests/system/apache/druid}/__init__.py | 0 .../system}/apache/druid/example_druid_dag.py | 2 +- .../tests/system/apache/hive}/__init__.py | 0 .../apache/hive/example_twitter_README.md | 0 .../apache/hive/example_twitter_dag.py | 4 +- .../tests/system/apache/iceberg}/__init__.py | 0 .../system}/apache/iceberg/example_iceberg.py | 2 +- .../tests/system/apache/kafka}/__init__.py | 0 .../kafka/example_dag_event_listener.py | 2 +- .../apache/kafka/example_dag_hello_kafka.py | 2 +- .../tests/system/apache/kylin}/__init__.py | 0 .../system}/apache/kylin/example_kylin_dag.py | 4 +- .../tests/system/apache/livy}/__init__.py | 0 .../tests/system}/apache/livy/example_livy.py | 4 +- .../tests/system/apache/pig}/__init__.py | 0 .../tests/system}/apache/pig/example_pig.py | 2 +- .../tests/system/apache/pinot}/__init__.py | 0 .../system}/apache/pinot/example_pinot_dag.py | 2 +- .../tests/system/apache/spark}/__init__.py | 0 .../system}/apache/spark/example_pyspark.py | 2 +- .../system}/apache/spark/example_spark_dag.py | 2 +- .../tests/system/asana}/__init__.py | 0 .../tests/system}/asana/example_asana.py | 4 +- .../tests/system/cncf}/__init__.py | 0 .../tests/system/cncf/kubernetes}/__init__.py | 0 .../cncf/kubernetes/example_kubernetes.py | 4 +- .../kubernetes/example_kubernetes_async.py | 4 +- .../example_kubernetes_decorator.py | 2 +- .../cncf/kubernetes/example_kubernetes_job.py | 4 +- .../kubernetes/example_kubernetes_resource.py | 4 +- .../kubernetes/example_spark_kubernetes.py | 4 +- .../example_spark_kubernetes_spark_pi.yaml | 0 .../cncf/kubernetes/spark_job_template.yaml | 0 .../tests/system/cohere}/__init__.py 
| 0 .../example_cohere_embedding_operator.py | 2 +- .../tests/system/common}/__init__.py | 0 .../tests/system/common/io}/__init__.py | 0 .../io/example_file_transfer_local_to_s3.py | 4 +- .../tests/system/common/sql}/__init__.py | 0 .../sql/example_sql_column_table_check.py | 2 +- .../common/sql/example_sql_execute_query.py | 2 +- .../tests/system/databricks}/__init__.py | 0 .../system}/databricks/example_databricks.py | 4 +- .../databricks/example_databricks_repos.py | 4 +- .../databricks/example_databricks_sensors.py | 4 +- .../databricks/example_databricks_sql.py | 4 +- .../databricks/example_databricks_workflow.py | 4 +- .../tests/system/dbt}/__init__.py | 0 .../tests/system/dbt/cloud}/__init__.py | 0 .../system}/dbt/cloud/example_dbt_cloud.py | 7 +- .../tests/system/dingding}/__init__.py | 0 .../system}/dingding/example_dingding.py | 4 +- .../tests/system/docker}/__init__.py | 0 .../tests/system}/docker/example_docker.py | 2 +- .../docker/example_docker_copy_data.py | 2 +- .../system}/docker/example_docker_swarm.py | 2 +- .../example_taskflow_api_docker_virtualenv.py | 2 +- .../tests/system/elasticsearch}/__init__.py | 0 .../example_elasticsearch_query.py | 2 +- .../tests/system/ftp}/__init__.py | 0 .../tests/system}/ftp/example_ftp.py | 4 +- .../tests/system/github}/__init__.py | 0 .../tests/system}/github/example_github.py | 2 +- .../tests/system}/google/README.md | 0 .../tests/system}/google/__init__.py | 0 .../tests/system/google/ads}/__init__.py | 0 .../tests/system}/google/ads/example_ads.py | 7 +- .../tests/system/google/cloud}/__init__.py | 0 .../system/google/cloud/automl}/__init__.py | 0 .../cloud/automl/example_automl_dataset.py | 4 +- .../automl/example_automl_translation.py | 4 +- .../example_automl_video_classification.py | 4 +- .../automl/example_automl_video_tracking.py | 4 +- .../example_automl_vision_classification.py | 4 +- .../example_automl_vision_object_detection.py | 4 +- .../cloud/automl/resources}/__init__.py | 0 .../system/google/cloud/azure}/__init__.py | 0 .../cloud/azure/example_azure_blob_to_gcs.py | 4 +- .../azure/example_azure_fileshare_to_gcs.py | 7 +- .../system/google/cloud/bigquery}/__init__.py | 0 .../bigquery/example_bigquery_dataset.py | 4 +- .../cloud/bigquery/example_bigquery_dts.py | 7 +- .../bigquery/example_bigquery_operations.py | 4 +- .../example_bigquery_operations_location.py | 4 +- .../bigquery/example_bigquery_queries.py | 7 +- .../example_bigquery_queries_async.py | 4 +- .../bigquery/example_bigquery_sensors.py | 4 +- .../cloud/bigquery/example_bigquery_tables.py | 7 +- .../bigquery/example_bigquery_to_bigquery.py | 4 +- .../cloud/bigquery/example_bigquery_to_gcs.py | 7 +- .../bigquery/example_bigquery_to_gcs_async.py | 7 +- .../bigquery/example_bigquery_to_mssql.py | 4 +- .../bigquery/example_bigquery_to_mysql.py | 4 +- .../bigquery/example_bigquery_to_postgres.py | 4 +- .../bigquery/example_bigquery_transfer.py | 7 +- .../bigquery/example_bigquery_value_check.py | 6 +- .../cloud/bigquery/resources}/__init__.py | 0 .../resources/example_bigquery_query.sql | 0 .../resources/update_table_schema.json | 0 .../cloud/bigquery/resources/us-states.csv | 0 .../system/google/cloud/bigtable}/__init__.py | 0 .../google/cloud/bigtable/example_bigtable.py | 7 +- .../google/cloud/cloud_batch}/__init__.py | 0 .../cloud/cloud_batch/example_cloud_batch.py | 7 +- .../google/cloud/cloud_build}/__init__.py | 0 .../cloud/cloud_build/example_cloud_build.py | 9 +- .../example_cloud_build_trigger.py | 9 +- .../cloud/cloud_build/resources}/__init__.py | 0 
.../resources/example_cloud_build.yaml | 0
.../google/cloud/cloud_functions}/__init__.py | 0
.../cloud_functions/example_functions.py | 7 +-
.../cloud/cloud_memorystore}/__init__.py | 0
.../example_cloud_memorystore_memcached.py | 4 +-
.../example_cloud_memorystore_redis.py | 7 +-
.../google/cloud/cloud_run}/__init__.py | 0
.../cloud/cloud_run/example_cloud_run.py | 4 +-
.../cloud_run/example_cloud_run_service.py | 4 +-
.../google/cloud/cloud_sql}/__init__.py | 0
.../cloud/cloud_sql/example_cloud_sql.py | 7 +-
.../cloud_sql/example_cloud_sql_query.py | 7 +-
.../cloud_sql/example_cloud_sql_query_ssl.py | 7 +-
.../system/google/cloud/composer}/__init__.py | 0
.../cloud/composer/example_cloud_composer.py | 4 +-
.../system/google/cloud/compute}/__init__.py | 0
.../google/cloud/compute/example_compute.py | 7 +-
.../cloud/compute/example_compute_igm.py | 7 +-
.../cloud/compute/example_compute_ssh.py | 7 +-
.../compute/example_compute_ssh_os_login.py | 7 +-
.../compute/example_compute_ssh_parallel.py | 7 +-
.../cloud/data_loss_prevention}/__init__.py | 0
.../example_dlp_deidentify_content.py | 7 +-
.../example_dlp_info_types.py | 7 +-
.../example_dlp_inspect_template.py | 7 +-
.../data_loss_prevention/example_dlp_job.py | 7 +-
.../example_dlp_job_trigger.py | 7 +-
.../resources}/__init__.py | 0
.../resources/dictionary.txt | 0
.../system/google/cloud/dataflow}/__init__.py | 0
.../cloud/dataflow/example_dataflow_go.py | 2 +-
.../dataflow/example_dataflow_native_java.py | 4 +-
.../example_dataflow_native_python.py | 4 +-
.../example_dataflow_native_python_async.py | 4 +-
.../dataflow/example_dataflow_pipeline.py | 4 +-
.../example_dataflow_sensors_deferrable.py | 4 +-
.../cloud/dataflow/example_dataflow_sql.py | 7 +-
.../example_dataflow_streaming_python.py | 4 +-
.../dataflow/example_dataflow_template.py | 7 +-
.../cloud/dataflow/example_dataflow_yaml.py | 7 +-
.../cloud/dataflow}/resources/__init__.py | 0
.../google/cloud/dataflow/resources/input.csv | 0
.../cloud/dataflow/resources/schema.json | 0
.../google/cloud/dataflow/resources/text.txt | 0
.../cloud/dataflow/resources/wordcount.go | 0
.../system/google/cloud/dataform}/__init__.py | 0
.../google/cloud/dataform/example_dataform.py | 7 +-
.../google/cloud/datafusion}/__init__.py | 0
.../cloud/datafusion/example_datafusion.py | 7 +-
.../google/cloud/datapipelines}/__init__.py | 0
.../datapipelines/example_datapipeline.py | 7 +-
.../system/google/cloud/dataplex}/__init__.py | 0
.../google/cloud/dataplex/example_dataplex.py | 7 +-
.../cloud/dataplex/example_dataplex_dp.py | 7 +-
.../cloud/dataplex/example_dataplex_dq.py | 7 +-
.../system/google/cloud/dataprep}/__init__.py | 0
.../google/cloud/dataprep/example_dataprep.py | 7 +-
.../system/google/cloud/dataproc}/__init__.py | 0
.../cloud/dataproc/example_dataproc_batch.py | 7 +-
.../example_dataproc_batch_deferrable.py | 7 +-
.../example_dataproc_batch_persistent.py | 7 +-
...cluster_create_existing_stopped_cluster.py | 7 +-
.../example_dataproc_cluster_deferrable.py | 7 +-
.../example_dataproc_cluster_diagnose.py | 7 +-
.../example_dataproc_cluster_generator.py | 7 +-
.../example_dataproc_cluster_start_stop.py | 7 +-
.../example_dataproc_cluster_update.py | 7 +-
.../cloud/dataproc/example_dataproc_flink.py | 7 +-
.../cloud/dataproc/example_dataproc_gke.py | 7 +-
.../cloud/dataproc/example_dataproc_hadoop.py | 7 +-
.../cloud/dataproc/example_dataproc_hive.py | 7 +-
.../cloud/dataproc/example_dataproc_pig.py | 7 +-
.../cloud/dataproc/example_dataproc_presto.py | 7 +-
.../dataproc/example_dataproc_pyspark.py | 7 +-
.../cloud/dataproc/example_dataproc_spark.py | 7 +-
.../dataproc/example_dataproc_spark_async.py | 7 +-
.../example_dataproc_spark_deferrable.py | 7 +-
.../dataproc/example_dataproc_spark_sql.py | 7 +-
.../cloud/dataproc/example_dataproc_sparkr.py | 7 +-
.../cloud/dataproc/example_dataproc_trino.py | 7 +-
.../dataproc/example_dataproc_workflow.py | 7 +-
.../example_dataproc_workflow_deferrable.py | 7 +-
.../cloud/dataproc_metastore}/__init__.py | 0
.../example_dataproc_metastore.py | 7 +-
.../example_dataproc_metastore_backup.py | 7 +-
...ataproc_metastore_hive_partition_sensor.py | 7 +-
.../google/cloud/datastore}/__init__.py | 0
.../datastore/example_datastore_commit.py | 7 +-
.../datastore/example_datastore_query.py | 7 +-
.../datastore/example_datastore_rollback.py | 7 +-
.../system/google/cloud/gcs}/__init__.py | 0
.../cloud/gcs/example_calendar_to_gcs.py | 4 +-
.../google/cloud/gcs/example_firestore.py | 4 +-
.../google/cloud/gcs/example_gcs_acl.py | 7 +-
.../cloud/gcs/example_gcs_copy_delete.py | 7 +-
.../google/cloud/gcs/example_gcs_sensor.py | 7 +-
.../cloud/gcs/example_gcs_to_bigquery.py | 7 +-
.../gcs/example_gcs_to_bigquery_async.py | 7 +-
.../google/cloud/gcs/example_gcs_to_gcs.py | 7 +-
.../google/cloud/gcs/example_gcs_to_gdrive.py | 7 +-
.../google/cloud/gcs/example_gcs_to_sheets.py | 4 +-
.../google/cloud/gcs/example_gcs_transform.py | 7 +-
.../gcs/example_gcs_transform_timespan.py | 7 +-
.../cloud/gcs/example_gcs_upload_download.py | 7 +-
.../google/cloud/gcs/example_gdrive_to_gcs.py | 7 +-
.../google/cloud/gcs/example_mssql_to_gcs.py | 7 +-
.../google/cloud/gcs/example_mysql_to_gcs.py | 4 +-
.../google/cloud/gcs/example_oracle_to_gcs.py | 7 +-
.../google/cloud/gcs/example_s3_to_gcs.py | 7 +-
.../google/cloud/gcs/example_sftp_to_gcs.py | 7 +-
.../google/cloud/gcs/example_sheets.py | 4 +-
.../google/cloud/gcs/example_sheets_to_gcs.py | 4 +-
.../google/cloud/gcs/example_trino_to_gcs.py | 4 +-
.../google/cloud/gcs/resources}/__init__.py | 0
.../cloud/gcs/resources/example_upload.txt | 0
.../google/cloud/gcs/resources/tmp.tar.gz | Bin
.../cloud/gcs/resources/transform_script.py | 0
.../cloud/gcs/resources/transform_timespan.py | 0
.../google/cloud/gcs/resources/us-states.csv | 0
.../cloud/kubernetes_engine}/__init__.py | 0
.../example_kubernetes_engine.py | 7 +-
.../example_kubernetes_engine_async.py | 7 +-
.../example_kubernetes_engine_job.py | 4 +-
.../example_kubernetes_engine_kueue.py | 4 +-
.../example_kubernetes_engine_resource.py | 4 +-
.../google/cloud/life_sciences}/__init__.py | 0
.../life_sciences/example_life_sciences.py | 7 +-
.../life_sciences/resources}/__init__.py | 0
.../google/cloud/life_sciences/resources/file | 0
.../google/cloud/ml_engine}/__init__.py | 0
.../cloud/ml_engine/example_mlengine.py | 4 +-
.../cloud/natural_language}/__init__.py | 0
.../example_natural_language.py | 4 +-
.../system/google/cloud/pubsub}/__init__.py | 0
.../google/cloud/pubsub/example_pubsub.py | 4 +-
.../cloud/pubsub/example_pubsub_deferrable.py | 4 +-
.../system/google/cloud/spanner}/__init__.py | 0
.../google/cloud/spanner/example_spanner.py | 7 +-
.../google/cloud/speech_to_text}/__init__.py | 0
.../speech_to_text/example_speech_to_text.py | 7 +-
.../google/cloud/sql_to_sheets}/__init__.py | 0
.../sql_to_sheets/example_sql_to_sheets.py | 4 +-
.../google/cloud/stackdriver}/__init__.py | 0
.../cloud/stackdriver/example_stackdriver.py | 7 +-
.../cloud/storage_transfer}/__init__.py | 0
...mple_cloud_storage_transfer_service_aws.py | 7 +-
...mple_cloud_storage_transfer_service_gcp.py | 4 +-
...oud_storage_transfer_service_gcs_to_gcs.py | 4 +-
.../storage_transfer/resources}/__init__.py | 0
.../resources/transfer_service_gcp_file | 0
.../transfer_service_gcs_to_gcs_file | 0
.../system/google/cloud/tasks}/__init__.py | 0
.../google/cloud/tasks/example_queue.py | 4 +-
.../google/cloud/tasks/example_tasks.py | 4 +-
.../google/cloud/text_to_speech}/__init__.py | 0
.../text_to_speech/example_text_to_speech.py | 7 +-
.../google/cloud/transfers}/__init__.py | 0
.../cloud/transfers/example_gcs_to_sftp.py | 7 +-
.../transfers/example_gdrive_to_local.py | 7 +-
.../transfers/example_postgres_to_gcs.py | 4 +-
.../cloud/transfers/resources}/__init__.py | 0
.../cloud/transfers/resources/empty.txt | 0
.../google/cloud/translate}/__init__.py | 0
.../cloud/translate/example_translate.py | 4 +-
.../cloud/translate_speech}/__init__.py | 0
.../example_translate_speech.py | 4 +-
.../google/cloud/vertex_ai}/__init__.py | 0
..._vertex_ai_auto_ml_forecasting_training.py | 4 +-
...xample_vertex_ai_auto_ml_image_training.py | 4 +-
...example_vertex_ai_auto_ml_list_training.py | 4 +-
...mple_vertex_ai_auto_ml_tabular_training.py | 4 +-
...xample_vertex_ai_auto_ml_video_training.py | 4 +-
.../example_vertex_ai_batch_prediction_job.py | 4 +-
.../example_vertex_ai_custom_container.py | 4 +-
.../vertex_ai/example_vertex_ai_custom_job.py | 4 +-
...ple_vertex_ai_custom_job_python_package.py | 4 +-
.../vertex_ai/example_vertex_ai_dataset.py | 4 +-
.../vertex_ai/example_vertex_ai_endpoint.py | 4 +-
.../example_vertex_ai_generative_model.py | 4 +-
...ample_vertex_ai_generative_model_tuning.py | 4 +-
...ple_vertex_ai_hyperparameter_tuning_job.py | 4 +-
.../example_vertex_ai_list_custom_jobs.py | 4 +-
.../example_vertex_ai_model_service.py | 4 +-
.../example_vertex_ai_pipeline_job.py | 4 +-
.../cloud/video_intelligence}/__init__.py | 0
.../example_video_intelligence.py | 4 +-
.../system/google/cloud/vision}/__init__.py | 0
.../vision/example_vision_annotate_image.py | 6 +-
.../vision/example_vision_autogenerated.py | 6 +-
.../cloud/vision/example_vision_explicit.py | 6 +-
.../google/cloud/workflows}/__init__.py | 0
.../cloud/workflows/example_workflows.py | 4 +-
.../tests/system}/google/conftest.py | 0
.../system/google/datacatalog}/__init__.py | 0
.../example_datacatalog_entries.py | 7 +-
.../example_datacatalog_search_catalog.py | 7 +-
.../example_datacatalog_tag_templates.py | 7 +-
.../datacatalog/example_datacatalog_tags.py | 7 +-
.../tests/system/google/firebase}/__init__.py | 0
.../tests/system/google/leveldb}/__init__.py | 0
.../system}/google/leveldb/example_leveldb.py | 4 +-
.../google/marketing_platform}/__init__.py | 0
.../example_analytics_admin.py | 4 +-
.../example_campaign_manager.py | 7 +-
.../marketing_platform/example_search_ads.py | 2 +-
.../tests/system/google/suite}/__init__.py | 0
.../google/suite/example_local_to_drive.py | 4 +-
.../google/suite/resources}/__init__.py | 0
.../system}/google/suite/resources/test1 | 0
.../system}/google/suite/resources/test2 | 0
.../system/google/workplace}/__init__.py | 0
.../tests/system/http}/__init__.py | 0
.../tests/system}/http/example_http.py | 2 +-
.../tests/system/influxdb}/__init__.py | 0
.../system}/influxdb/example_influxdb.py | 4 +-
.../influxdb/example_influxdb_query.py | 2 +-
.../tests/system/jdbc}/__init__.py | 0
.../system}/jdbc/example_jdbc_queries.py | 4 +-
.../tests/system/jenkins}/__init__.py | 0
.../jenkins/example_jenkins_job_trigger.py | 2 +-
.../tests/system/microsoft}/__init__.py | 0
.../tests/system/microsoft/azure}/__init__.py | 0
.../azure/example_adf_run_pipeline.py | 4 +-
.../microsoft/azure/example_adls_create.py | 4 +-
.../microsoft/azure/example_adls_delete.py | 4 +-
.../microsoft/azure/example_adls_list.py | 4 +-
.../azure/example_azure_batch_operator.py | 2 +-
.../example_azure_container_instances.py | 2 +-
.../microsoft/azure/example_azure_cosmosdb.py | 4 +-
.../azure/example_azure_service_bus.py | 4 +-
.../microsoft/azure/example_azure_synapse.py | 2 +-
.../microsoft/azure/example_fileshare.py | 4 +-
.../microsoft/azure/example_local_to_adls.py | 4 +-
.../microsoft/azure/example_local_to_wasb.py | 4 +-
.../microsoft/azure/example_msfabric.py | 4 +-
.../microsoft/azure/example_msgraph.py | 4 +-
.../microsoft/azure/example_powerbi.py | 4 +-
.../azure/example_powerbi_dataset_refresh.py | 4 +-
.../microsoft/azure/example_s3_to_wasb.py | 7 +-
.../microsoft/azure/example_sftp_to_wasb.py | 4 +-
.../azure/example_synapse_run_pipeline.py | 4 +-
.../microsoft/azure/example_wasb_sensors.py | 2 +-
.../tests/system/microsoft/mssql}/__init__.py | 0
.../system}/microsoft/mssql/create_table.sql | 0
.../system}/microsoft/mssql/example_mssql.py | 4 +-
.../tests/system/microsoft/winrm}/__init__.py | 0
.../system}/microsoft/winrm/example_winrm.py | 4 +-
.../tests/system/mysql}/__init__.py | 0
.../tests/system}/mysql/example_mysql.py | 2 +-
.../tests/system/neo4j}/__init__.py | 0
.../tests/system}/neo4j/example_neo4j.py | 2 +-
.../tests/system/openai}/__init__.py | 0
.../tests/system}/openai/example_openai.py | 2 +-
.../openai/example_trigger_batch_operator.py | 2 +-
.../tests/system/opensearch}/__init__.py | 0
.../system}/opensearch/example_opensearch.py | 4 +-
.../tests/system/opsgenie}/__init__.py | 0
.../opsgenie/example_opsgenie_alert.py | 2 +-
.../opsgenie/example_opsgenie_notifier.py | 2 +-
.../tests/system/papermill}/__init__.py | 0
.../tests/system}/papermill/conftest.py | 0
.../system}/papermill/example_papermill.py | 2 +-
.../example_papermill_remote_verify.py | 2 +-
.../papermill/example_papermill_verify.py | 2 +-
.../system}/papermill/input_notebook.ipynb | 0
.../tests/system/pgvector}/__init__.py | 0
.../system}/pgvector/example_pgvector.py | 2 +-
.../pgvector/example_pgvector_openai.py | 2 +-
.../tests/system/pinecone}/__init__.py | 0
.../pinecone/example_create_pod_index.py | 2 +-
.../example_create_serverless_index.py | 2 +-
.../system}/pinecone/example_dag_pinecone.py | 2 +-
.../pinecone/example_pinecone_cohere.py | 2 +-
.../pinecone/example_pinecone_openai.py | 2 +-
.../tests/system/postgres}/__init__.py | 0
.../system}/postgres/example_postgres.py | 4 +-
.../tests/system/presto}/__init__.py | 0
.../system}/presto/example_gcs_to_presto.py | 2 +-
.../tests/system/qdrant}/__init__.py | 0
.../system}/qdrant/example_dag_qdrant.py | 2 +-
.../tests/system/redis}/__init__.py | 0
.../system}/redis/example_redis_publish.py | 4 +-
.../tests/system/salesforce}/__init__.py | 0
.../tests/system}/salesforce/example_bulk.py | 2 +-
.../example_salesforce_apex_rest.py | 2 +-
.../tests/system/samba}/__init__.py | 0
.../system}/samba/example_gcs_to_samba.py | 7 +-
.../tests/system/sftp}/__init__.py | 0
.../tests/system}/sftp/example_sftp_sensor.py | 4 +-
.../tests/system/singularity}/__init__.py | 0
.../singularity/example_singularity.py | 2 +-
.../tests/system/slack}/__init__.py | 0
.../tests/system}/slack/example_slack.py | 2 +-
.../system}/slack/example_slack_webhook.py | 2 +-
.../system}/slack/example_sql_to_slack.py | 2 +-
.../slack/example_sql_to_slack_webhook.py | 2 +-
.../tests/system/snowflake}/__init__.py | 0
.../snowflake/example_copy_into_snowflake.py | 2 +-
.../system}/snowflake/example_snowflake.py | 2 +-
...e_snowflake_snowflake_op_template_file.sql | 0
.../snowflake/example_snowpark_decorator.py | 3 +-
.../snowflake/example_snowpark_operator.py | 3 +-
.../tests/system/sqlite}/__init__.py | 0
.../tests/system}/sqlite/create_table.sql | 0
.../tests/system}/sqlite/example_sqlite.py | 4 +-
.../tests/system/tableau}/__init__.py | 0
.../tests/system}/tableau/example_tableau.py | 2 +-
.../tests/system/telegram}/__init__.py | 0
.../system}/telegram/example_telegram.py | 2 +-
.../tests/system/teradata}/__init__.py | 0
.../system}/teradata/create_ssl_table.sql | 0
.../tests/system}/teradata/create_table.sql | 0
...example_azure_blob_to_teradata_transfer.py | 4 +-
.../example_s3_to_teradata_transfer.py | 4 +-
.../system}/teradata/example_ssl_teradata.py | 4 +-
.../system}/teradata/example_teradata.py | 4 +-
.../teradata/example_teradata_call_sp.py | 4 +-
.../example_teradata_compute_cluster.py | 4 +-
.../example_teradata_to_teradata_transfer.py | 4 +-
.../tests/system/trino}/__init__.py | 0
.../system}/trino/example_gcs_to_trino.py | 2 +-
.../tests/system}/trino/example_trino.py | 2 +-
.../tests/system/weaviate}/__init__.py | 0
.../weaviate/example_weaviate_cohere.py | 2 +-
.../example_weaviate_dynamic_mapping_dag.py | 2 +-
.../weaviate/example_weaviate_openai.py | 2 +-
.../weaviate/example_weaviate_operator.py | 2 +-
.../weaviate/example_weaviate_using_hook.py | 2 +-
.../example_weaviate_vectorizer_dag.py | 2 +-
...example_weaviate_without_vectorizer_dag.py | 2 +-
.../weaviate/jeopardy_data_with_vectors.json | 0
.../jeopardy_data_without_vectors.json | 0
.../jeopardy_doc_data_without_vectors.json | 0
.../tests/system/yandex}/__init__.py | 0
.../system}/yandex/example_yandexcloud.py | 7 +-
.../yandex/example_yandexcloud_dataproc.py | 7 +-
...xample_yandexcloud_dataproc_lightweight.py | 7 +-
.../system}/yandex/example_yandexcloud_yq.py | 7 +-
.../tests/system/ydb}/__init__.py | 0
.../tests/system}/ydb/example_ydb.py | 4 +-
.../tests/system/zendesk}/__init__.py | 0
.../zendesk/example_zendesk_custom_get.py | 2 +-
.../tests/tableau}/__init__.py | 0
.../tests/tableau/hooks}/__init__.py | 0
.../tests}/tableau/hooks/test_tableau.py | 22 +-
.../tests/tableau/operators}/__init__.py | 0
.../tests}/tableau/operators/test_tableau.py | 0
.../tests/tableau/sensors}/__init__.py | 0
.../tests}/tableau/sensors/test_tableau.py | 0
.../tests/telegram}/__init__.py | 0
.../tests/telegram/hooks}/__init__.py | 0
.../tests}/telegram/hooks/test_telegram.py | 0
.../tests/telegram/operators}/__init__.py | 0
.../telegram/operators/test_telegram.py | 0
.../tests/teradata}/__init__.py | 0
.../tests/teradata/hooks}/__init__.py | 0
.../tests}/teradata/hooks/test_teradata.py | 9 +-
.../tests/teradata/operators}/__init__.py | 0
.../teradata/operators/test_teradata.py | 0
.../test_teradata_compute_cluster.py | 0
.../tests/teradata/transfers}/__init__.py | 0
.../transfers/test_azure_blob_to_teradata.py | 0
.../teradata/transfers/test_s3_to_teradata.py | 0
.../transfers/test_teradata_to_teradata.py | 0
.../tests/teradata/triggers}/__init__.py | 0
.../triggers/test_teradata_compute_cluster.py | 0
.../tests/teradata/utils}/__init__.py | 0
.../tests}/teradata/utils/test_constants.py | 0
.../tests/trino}/__init__.py | 0
.../tests/trino/assets}/__init__.py | 0
.../tests}/trino/assets/test_trino.py | 0
.../tests/trino/hooks}/__init__.py | 0
.../tests}/trino/hooks/test_trino.py | 0
.../tests/trino/operators}/__init__.py | 0
.../tests}/trino/operators/test_trino.py | 0
.../tests/trino/transfers}/__init__.py | 0
.../trino/transfers/test_gcs_to_trino.py | 0
.../tests/vertica}/__init__.py | 0
.../tests/vertica/hooks}/__init__.py | 0
.../tests}/vertica/hooks/test_vertica.py | 0
.../tests/vertica/operators}/__init__.py | 0
.../tests}/vertica/operators/test_vertica.py | 0
.../tests/weaviate}/__init__.py | 0
.../tests/weaviate/hooks}/__init__.py | 0
.../tests}/weaviate/hooks/test_weaviate.py | 0
.../tests/weaviate/operators}/__init__.py | 0
.../weaviate/operators/test_weaviate.py | 0
.../tests/yandex}/__init__.py | 0
.../tests/yandex/hooks}/__init__.py | 0
.../tests}/yandex/hooks/test_dataproc.py | 0
.../tests}/yandex/hooks/test_yandex.py | 21 +-
.../tests}/yandex/hooks/test_yq.py | 0
.../tests/yandex/links}/__init__.py | 0
.../tests}/yandex/links/test_yq.py | 5 +-
.../tests/yandex/operators}/__init__.py | 0
.../tests}/yandex/operators/test_dataproc.py | 0
.../tests}/yandex/operators/test_yq.py | 2 +-
.../tests/yandex/secrets}/__init__.py | 0
.../tests}/yandex/secrets/test_lockbox.py | 0
.../tests/yandex/utils}/__init__.py | 0
.../tests}/yandex/utils/test_credentials.py | 0
.../tests}/yandex/utils/test_defaults.py | 0
.../tests}/yandex/utils/test_fields.py | 0
.../tests}/yandex/utils/test_user_agent.py | 0
.../tests/ydb}/__init__.py | 0
.../tests/ydb/hooks}/__init__.py | 0
.../tests}/ydb/hooks/test_ydb.py | 0
.../tests/ydb/operators}/__init__.py | 0
.../tests}/ydb/operators/test_ydb.py | 0
.../tests/ydb/utils/__init__.py | 0
.../tests}/ydb/utils/test_credentials.py | 0
.../tests/ydb/utils/test_defaults.py | 1 -
.../tests/zendesk}/__init__.py | 1 -
.../tests/zendesk/hooks}/__init__.py | 1 -
.../tests}/zendesk/hooks/test_zendesk.py | 0
pyproject.toml | 93 +-
scripts/ci/docker-compose/local.yml | 3 +
scripts/ci/kubernetes/k8s_requirements.txt | 4 +-
...eck_providers_subpackages_all_have_init.py | 38 +-
scripts/ci/pre_commit/check_system_tests.py | 8 +-
.../check_system_tests_hidden_in_index.py | 4 +-
scripts/ci/pre_commit/mypy_folder.py | 16 +-
.../pre_commit/update_common_sql_api_stubs.py | 8 +-
.../pre_commit/update_example_dags_paths.py | 13 +-
.../update_providers_dependencies.py | 26 +-
scripts/ci/pre_commit/version_heads_map.py | 5 +-
scripts/docker/install_airflow.sh | 7 +-
scripts/in_container/install_devel_deps.py | 3 +-
scripts/in_container/run_mypy.sh | 4 +-
.../run_provider_yaml_files_check.py | 36 +-
tests/always/test_connection.py | 3 +-
tests/always/test_example_dags.py | 31 +-
tests/always/test_project_structure.py | 263 ++-
tests/always/test_secrets.py | 5 +-
tests/always/test_secrets_backends.py | 3 +-
tests/always/test_secrets_local_filesystem.py | 3 +-
tests/api_connexion/conftest.py | 7 +-
.../endpoints/test_asset_endpoint.py | 11 +-
.../endpoints/test_backfill_endpoint.py | 10 +-
.../endpoints/test_config_endpoint.py | 4 +-
.../endpoints/test_connection_endpoint.py | 9 +-
.../endpoints/test_dag_endpoint.py | 9 +-
.../endpoints/test_dag_parsing.py | 5 +-
.../endpoints/test_dag_run_endpoint.py | 11 +-
.../endpoints/test_dag_source_endpoint.py | 5 +-
.../endpoints/test_dag_stats_endpoint.py | 5 +-
.../endpoints/test_dag_warning_endpoint.py | 5 +-
.../endpoints/test_event_log_endpoint.py | 7 +-
.../endpoints/test_extra_link_endpoint.py | 11 +-
.../endpoints/test_import_error_endpoint.py | 9 +-
.../endpoints/test_log_endpoint.py | 5 +-
.../test_mapped_task_instance_endpoint.py | 7 +-
.../endpoints/test_plugin_endpoint.py | 9 +-
.../endpoints/test_pool_endpoint.py | 9 +-
.../endpoints/test_provider_endpoint.py | 3 +-
.../endpoints/test_task_endpoint.py | 5 +-
.../endpoints/test_task_instance_endpoint.py | 7 +-
.../endpoints/test_variable_endpoint.py | 9 +-
.../endpoints/test_xcom_endpoint.py | 7 +-
.../schemas/test_connection_schema.py | 3 +-
.../schemas/test_dag_run_schema.py | 5 +-
.../schemas/test_dataset_schema.py | 3 +-
.../schemas/test_error_schema.py | 5 +-
.../schemas/test_plugin_schema.py | 3 +-
.../schemas/test_pool_schemas.py | 3 +-
.../api_connexion/schemas/test_xcom_schema.py | 3 +-
tests/api_connexion/test_auth.py | 7 +-
tests/api_connexion/test_parameters.py | 3 +-
tests/api_connexion/test_security.py | 2 +-
.../views/public/test_connections.py | 3 +-
tests/api_fastapi/views/public/test_dags.py | 3 +-
.../views/public/test_variables.py | 3 +-
tests/api_fastapi/views/ui/test_assets.py | 3 +-
tests/api_fastapi/views/ui/test_dashboard.py | 3 +-
.../endpoints/test_rpc_api_endpoint.py | 5 +-
tests/api_internal/test_internal_api_call.py | 3 +-
tests/assets/test_manager.py | 2 +-
tests/assets/tests_asset.py | 2 +-
tests/auth/managers/simple/views/test_auth.py | 3 +-
tests/cli/commands/test_celery_command.py | 3 +-
tests/cli/commands/test_config_command.py | 3 +-
tests/cli/commands/test_connection_command.py | 3 +-
tests/cli/commands/test_dag_command.py | 5 +-
.../commands/test_dag_processor_command.py | 3 +-
tests/cli/commands/test_info_command.py | 3 +-
.../cli/commands/test_internal_api_command.py | 3 +-
tests/cli/commands/test_jobs_command.py | 3 +-
tests/cli/commands/test_kerberos_command.py | 3 +-
tests/cli/commands/test_kubernetes_command.py | 3 +-
tests/cli/commands/test_plugins_command.py | 15 +-
.../test_rotate_fernet_key_command.py | 5 +-
tests/cli/commands/test_scheduler_command.py | 3 +-
tests/cli/commands/test_task_command.py | 7 +-
tests/cli/commands/test_variable_command.py | 3 +-
tests/cli/commands/test_webserver_command.py | 3 +-
tests/cli/conftest.py | 6 +-
tests/cli/test_cli_parser.py | 3 +-
tests/conftest.py | 1425 +---------------
tests/core/test_configuration.py | 5 +-
tests/core/test_core.py | 3 +-
tests/core/test_example_dags_system.py | 2 +-
tests/core/test_impersonation_tests.py | 3 +-
tests/core/test_logging_config.py | 3 +-
tests/core/test_otel_tracer.py | 3 +-
tests/core/test_sentry.py | 3 +-
tests/core/test_settings.py | 3 +-
tests/core/test_sqlalchemy_config.py | 3 +-
tests/core/test_stats.py | 3 +-
tests/dag_processing/test_job_runner.py | 7 +-
tests/dag_processing/test_processor.py | 11 +-
tests/dags/test_miscellaneous.py | 3 +-
tests/dags/test_sensor.py | 3 +-
tests/decorators/test_bash.py | 3 +-
tests/decorators/test_python.py | 3 +-
tests/deprecations_ignore.yml | 113 --
tests/executors/test_base_executor.py | 2 +-
tests/executors/test_executor_loader.py | 3 +-
.../cli/commands/test_celery_command.py | 3 +-
.../executors/test_celery_executor.py | 3 +-
tests/integration/security/test_kerberos.py | 3 +-
tests/jobs/test_backfill_job.py | 9 +-
tests/jobs/test_base_job.py | 3 +-
tests/jobs/test_local_task_job.py | 11 +-
tests/jobs/test_scheduler_job.py | 15 +-
tests/jobs/test_triggerer_job.py | 3 +-
tests/jobs/test_triggerer_job_logging.py | 3 +-
tests/lineage/test_hook.py | 3 +-
tests/lineage/test_lineage.py | 3 +-
tests/listeners/class_listener.py | 3 +-
.../test_dag_import_error_listener.py | 7 +-
tests/models/test_backfill.py | 8 +-
tests/models/test_base.py | 3 +-
tests/models/test_baseoperator.py | 3 +-
tests/models/test_cleartasks.py | 5 +-
tests/models/test_dag.py | 24 +-
tests/models/test_dagbag.py | 7 +-
tests/models/test_dagcode.py | 3 +-
tests/models/test_dagrun.py | 9 +-
tests/models/test_dagwarning.py | 3 +-
tests/models/test_mappedoperator.py | 9 +-
tests/models/test_param.py | 3 +-
tests/models/test_pool.py | 8 +-
tests/models/test_renderedtifields.py | 5 +-
tests/models/test_serialized_dag.py | 5 +-
tests/models/test_skipmixin.py | 3 +-
tests/models/test_taskinstance.py | 11 +-
tests/models/test_timestamp.py | 3 +-
tests/models/test_trigger.py | 3 +-
tests/models/test_variable.py | 5 +-
tests/models/test_xcom.py | 5 +-
tests/models/test_xcom_arg.py | 5 +-
tests/operators/test_branch_operator.py | 3 +-
tests/operators/test_email.py | 3 +-
tests/operators/test_generic_transfer.py | 2 +-
tests/operators/test_latest_only_operator.py | 5 +-
tests/operators/test_python.py | 7 +-
tests/plugins/test_plugin.py | 3 +-
tests/plugins/test_plugins_manager.py | 5 +-
tests/secrets/test_cache.py | 3 +-
tests/security/test_kerberos.py | 3 +-
tests/sensors/test_base.py | 5 +-
tests/sensors/test_external_task_sensor.py | 7 +-
tests/serialization/test_dag_serialization.py | 57 +-
tests/serialization/test_pydantic_models.py | 3 +-
tests/serialization/test_serde.py | 3 +-
.../serialization/test_serialized_objects.py | 3 +-
.../example_external_task_child_deferrable.py | 2 +-
...example_external_task_parent_deferrable.py | 4 +-
tests/system/example_empty.py | 4 +-
.../providers/microsoft/azure/__init__.py | 17 -
tests/system/providers/papermill/__init__.py | 17 -
.../task_runner/test_standard_task_runner.py | 5 +-
.../deps/test_pool_slots_available_dep.py | 3 +-
tests/ti_deps/deps/test_prev_dagrun_dep.py | 5 +-
.../deps/test_ready_to_reschedule_dep.py | 3 +-
tests/utils/log/test_log_reader.py | 5 +-
tests/utils/log/test_secrets_masker.py | 3 +-
tests/utils/log/test_task_context_logger.py | 5 +-
tests/utils/test_db.py | 3 +-
tests/utils/test_db_cleanup.py | 8 +-
tests/utils/test_db_manager.py | 3 +-
tests/utils/test_dot_renderer.py | 5 +-
tests/utils/test_email.py | 3 +-
tests/utils/test_file.py | 3 +-
tests/utils/test_helpers.py | 5 +-
tests/utils/test_log_handlers.py | 5 +-
tests/utils/test_net.py | 3 +-
tests/utils/test_serve_logs.py | 3 +-
tests/utils/test_sqlalchemy.py | 3 +-
tests/utils/test_state.py | 3 +-
tests/utils/test_task_group.py | 5 +-
...test_task_handler_with_custom_formatter.py | 7 +-
tests/utils/test_types.py | 3 +-
tests/www/test_app.py | 5 +-
tests/www/test_utils.py | 8 +-
tests/www/views/conftest.py | 13 +-
.../www/views/test_anonymous_as_admin_role.py | 3 +-
tests/www/views/test_session.py | 5 +-
tests/www/views/test_views.py | 15 +-
tests/www/views/test_views_acl.py | 15 +-
tests/www/views/test_views_base.py | 7 +-
.../www/views/test_views_cluster_activity.py | 3 +-
tests/www/views/test_views_configuration.py | 5 +-
tests/www/views/test_views_connection.py | 7 +-
.../www/views/test_views_custom_user_views.py | 11 +-
tests/www/views/test_views_dagrun.py | 13 +-
tests/www/views/test_views_dataset.py | 5 +-
tests/www/views/test_views_decorators.py | 11 +-
tests/www/views/test_views_extra_links.py | 7 +-
tests/www/views/test_views_grid.py | 7 +-
tests/www/views/test_views_home.py | 13 +-
tests/www/views/test_views_log.py | 11 +-
tests/www/views/test_views_mount.py | 3 +-
tests/www/views/test_views_paused.py | 3 +-
tests/www/views/test_views_pool.py | 3 +-
tests/www/views/test_views_rate_limit.py | 7 +-
tests/www/views/test_views_rendered.py | 13 +-
tests/www/views/test_views_robots.py | 2 +-
tests/www/views/test_views_task_norun.py | 2 +-
tests/www/views/test_views_tasks.py | 15 +-
tests/www/views/test_views_trigger_dag.py | 7 +-
tests/www/views/test_views_variable.py | 5 +-
4405 files changed, 7324 insertions(+), 6196 deletions(-)
delete mode 100644 airflow/providers/.gitignore
rename scripts/ci/pre_commit/check_providers_init.py => dev/tests_common/__init__.py (62%)
mode change 100755 => 100644
rename {airflow/providers/airbyte/sensors => dev/tests_common/_internals}/__init__.py (100%)
rename {tests => dev/tests_common}/_internals/capture_warnings.py (100%)
rename {tests => dev/tests_common}/_internals/forbidden_warnings.py (90%)
create mode 100644 dev/tests_common/pyproject.toml
create mode 100644 dev/tests_common/pytest_plugin.py
rename {tests => dev/tests_common}/test_utils/README.md (67%)
rename {tests/system/providers/google/marketing_platform => dev/tests_common/test_utils}/__init__.py (87%)
rename {tests => dev/tests_common}/test_utils/api_connexion_utils.py (93%)
rename {tests => dev/tests_common}/test_utils/asserts.py (100%)
rename {tests => dev/tests_common}/test_utils/azure_system_helpers.py (97%)
rename {tests => dev/tests_common}/test_utils/compat.py (100%)
rename {tests => dev/tests_common}/test_utils/config.py (100%)
rename {tests => dev/tests_common}/test_utils/db.py (89%)
rename {tests => dev/tests_common}/test_utils/decorators.py (100%)
rename {tests => dev/tests_common}/test_utils/fake_datetime.py (100%)
rename {tests => dev/tests_common}/test_utils/gcp_system_helpers.py (90%)
rename {tests => dev/tests_common}/test_utils/get_all_tests.py (100%)
rename {tests => dev/tests_common}/test_utils/hdfs_utils.py (100%)
rename {tests => dev/tests_common}/test_utils/logging_command_executor.py (100%)
rename {tests => dev/tests_common}/test_utils/mapping.py (100%)
rename {tests => dev/tests_common}/test_utils/mock_executor.py (100%)
rename {tests => dev/tests_common}/test_utils/mock_operators.py (98%)
rename {tests => dev/tests_common}/test_utils/mock_plugins.py (100%)
rename {tests => dev/tests_common}/test_utils/mock_security_manager.py (92%)
rename {airflow/providers/airbyte/triggers => dev/tests_common/test_utils/operators}/__init__.py (100%)
rename {tests => dev/tests_common}/test_utils/operators/postgres_local_executor.cfg (100%)
rename {airflow/providers/alibaba/cloud/log => dev/tests_common/test_utils/perf}/__init__.py (100%)
rename {tests => dev/tests_common}/test_utils/perf/perf_kit/__init__.py (100%)
rename {tests => dev/tests_common}/test_utils/perf/perf_kit/memory.py (100%)
rename {tests => dev/tests_common}/test_utils/perf/perf_kit/python.py (100%)
rename {tests => dev/tests_common}/test_utils/perf/perf_kit/repeat_and_time.py (100%)
rename {tests => dev/tests_common}/test_utils/perf/perf_kit/sqlalchemy.py (100%)
rename {tests => dev/tests_common}/test_utils/permissions.py (100%)
rename {tests => dev/tests_common}/test_utils/providers.py (100%)
rename {tests => dev/tests_common}/test_utils/remote_user_api_auth_backend.py (100%)
rename {tests => dev/tests_common}/test_utils/reset_warning_registry.py (100%)
rename {tests => dev/tests_common}/test_utils/salesforce_system_helpers.py (100%)
rename {tests => dev/tests_common}/test_utils/sftp_system_helpers.py (100%)
rename tests/system/utils/__init__.py => dev/tests_common/test_utils/system_tests.py (100%)
rename {tests => dev/tests_common}/test_utils/system_tests_class.py (97%)
rename {tests => dev/tests_common}/test_utils/terraform.py (95%)
rename {tests => dev/tests_common}/test_utils/timetables.py (100%)
rename {tests/system/utils => dev/tests_common/test_utils}/watcher.py (100%)
rename {tests => dev/tests_common}/test_utils/www.py (100%)
delete mode 100644 docs/exts/provider_init_hack.py
create mode 100644 providers/.gitignore
rename {tests/test_utils => providers}/__init__.py (74%)
create mode 100644 providers/pyproject.toml
create mode 100644 providers/src/airflow/providers/.gitignore
rename {airflow => providers/src/airflow}/providers/MANAGING_PROVIDERS_LIFECYCLE.rst (98%)
rename {airflow => providers/src/airflow}/providers/airbyte/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/airbyte/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/airbyte/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/airbyte/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/airbyte/hooks/airbyte.py (100%)
rename {airflow => providers/src/airflow}/providers/airbyte/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/airbyte/operators/airbyte.py (100%)
rename {airflow => providers/src/airflow}/providers/airbyte/provider.yaml (100%)
rename {airflow/providers/alibaba/cloud/operators => providers/src/airflow/providers/airbyte/sensors}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/airbyte/sensors/airbyte.py (100%)
rename {airflow/providers/alibaba/cloud/sensors => providers/src/airflow/providers/airbyte/triggers}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/airbyte/triggers/airbyte.py (100%)
rename {airflow => providers/src/airflow}/providers/alibaba/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/alibaba/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/alibaba/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/alibaba/cloud/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/alibaba/cloud/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/alibaba/cloud/hooks/analyticdb_spark.py (100%)
rename {airflow => providers/src/airflow}/providers/alibaba/cloud/hooks/oss.py (100%)
rename {airflow/providers/amazon/aws => providers/src/airflow/providers/alibaba/cloud/log}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/alibaba/cloud/log/oss_task_handler.py (100%)
rename {airflow/providers/amazon/aws/assets => providers/src/airflow/providers/alibaba/cloud/operators}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/alibaba/cloud/operators/analyticdb_spark.py (100%)
rename {airflow => providers/src/airflow}/providers/alibaba/cloud/operators/oss.py (100%)
rename {airflow/providers/amazon/aws/auth_manager => providers/src/airflow/providers/alibaba/cloud/sensors}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/alibaba/cloud/sensors/analyticdb_spark.py (100%)
rename {airflow => providers/src/airflow}/providers/alibaba/cloud/sensors/oss_key.py (100%)
rename {airflow => providers/src/airflow}/providers/alibaba/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/amazon/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/amazon/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/amazon/__init__.py (100%)
rename {airflow/providers/amazon/aws/auth_manager/avp => providers/src/airflow/providers/amazon/aws}/__init__.py (100%)
rename {airflow/providers/amazon/aws/auth_manager/cli => providers/src/airflow/providers/amazon/aws/assets}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/assets/s3.py (100%)
rename {airflow/providers/amazon/aws/auth_manager/security_manager => providers/src/airflow/providers/amazon/aws/auth_manager}/__init__.py (100%)
rename {airflow/providers/amazon/aws/auth_manager/views => providers/src/airflow/providers/amazon/aws/auth_manager/avp}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/auth_manager/avp/entities.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/auth_manager/avp/facade.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/auth_manager/avp/schema.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/auth_manager/aws_auth_manager.py (100%)
rename {airflow/providers/amazon/aws/executors => providers/src/airflow/providers/amazon/aws/auth_manager/cli}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/auth_manager/cli/avp_commands.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/auth_manager/cli/definition.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/auth_manager/constants.py (100%)
rename {airflow/providers/amazon/aws/executors/utils => providers/src/airflow/providers/amazon/aws/auth_manager/security_manager}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/auth_manager/security_manager/aws_security_manager_override.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/auth_manager/user.py (100%)
rename {airflow/providers/amazon/aws/fs => providers/src/airflow/providers/amazon/aws/auth_manager/views}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/auth_manager/views/auth.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/exceptions.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/executors/Dockerfile (100%)
rename {airflow/providers/amazon/aws/hooks => providers/src/airflow/providers/amazon/aws/executors}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/executors/batch/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/executors/batch/batch_executor.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/executors/batch/batch_executor_config.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/executors/batch/boto_schema.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/executors/batch/utils.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/executors/ecs/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/executors/ecs/boto_schema.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/executors/ecs/ecs_executor.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/executors/ecs/ecs_executor_config.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/executors/ecs/utils.py (100%)
rename {airflow/providers/amazon/aws/links => providers/src/airflow/providers/amazon/aws/executors/utils}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/executors/utils/base_config_keys.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/executors/utils/exponential_backoff_retry.py (100%)
rename {airflow/providers/amazon/aws/log => providers/src/airflow/providers/amazon/aws/fs}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/fs/s3.py (100%)
rename {airflow/providers/amazon/aws/notifications => providers/src/airflow/providers/amazon/aws/hooks}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/appflow.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/athena.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/athena_sql.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/base_aws.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/batch_client.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/batch_waiters.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/batch_waiters.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/bedrock.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/chime.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/cloud_formation.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/comprehend.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/datasync.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/dms.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/dynamodb.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/ec2.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/ecr.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/ecs.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/eks.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/elasticache_replication_group.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/emr.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/eventbridge.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/glacier.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/glue.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/glue_catalog.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/glue_crawler.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/glue_databrew.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/kinesis.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/kinesis_analytics.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/lambda_function.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/logs.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/neptune.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/opensearch_serverless.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/quicksight.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/rds.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/redshift_cluster.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/redshift_data.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/redshift_sql.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/s3.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/sagemaker.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/secrets_manager.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/ses.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/sns.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/sqs.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/ssm.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/step_function.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/sts.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/hooks/verified_permissions.py (100%)
rename {airflow/providers/amazon/aws/operators => providers/src/airflow/providers/amazon/aws/links}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/links/athena.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/links/base_aws.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/links/batch.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/links/emr.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/links/glue.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/links/logs.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/links/step_function.py (100%)
rename {airflow/providers/amazon/aws/secrets => providers/src/airflow/providers/amazon/aws/log}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/log/cloudwatch_task_handler.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/log/s3_task_handler.py (100%)
rename {airflow/providers/amazon/aws/sensors => providers/src/airflow/providers/amazon/aws/notifications}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/notifications/chime.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/notifications/sns.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/notifications/sqs.py (100%)
rename {airflow/providers/amazon/aws/transfers => providers/src/airflow/providers/amazon/aws/operators}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/appflow.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/athena.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/base_aws.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/batch.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/bedrock.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/cloud_formation.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/comprehend.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/datasync.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/dms.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/ec2.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/ecs.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/eks.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/emr.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/eventbridge.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/glacier.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/glue.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/glue_crawler.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/glue_databrew.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/kinesis_analytics.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/lambda_function.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/neptune.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/quicksight.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/rds.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/redshift_cluster.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/redshift_data.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/s3.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/sagemaker.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/sns.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/sqs.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/operators/step_function.py (100%)
rename {airflow/providers/amazon/aws/waiters => providers/src/airflow/providers/amazon/aws/secrets}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/secrets/secrets_manager.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/secrets/systems_manager.py (100%)
rename {airflow/providers/apache => providers/src/airflow/providers/amazon/aws/sensors}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/athena.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/base_aws.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/batch.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/bedrock.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/cloud_formation.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/comprehend.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/dms.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/dynamodb.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/ec2.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/ecs.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/eks.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/emr.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/glacier.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/glue.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/glue_catalog_partition.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/glue_crawler.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/kinesis_analytics.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/lambda_function.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/opensearch_serverless.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/quicksight.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/rds.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/redshift_cluster.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/s3.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/sagemaker.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/sqs.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/sensors/step_function.py (100%)
rename {airflow/providers/apache/beam/triggers => providers/src/airflow/providers/amazon/aws/transfers}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/azure_blob_to_s3.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/base.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/dynamodb_to_s3.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/exasol_to_s3.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/ftp_to_s3.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/gcs_to_s3.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/glacier_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/google_api_to_s3.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/hive_to_dynamodb.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/http_to_s3.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/imap_attachment_to_s3.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/local_to_s3.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/mongo_to_s3.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/redshift_to_s3.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/s3_to_dynamodb.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/s3_to_ftp.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/s3_to_redshift.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/s3_to_sftp.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/s3_to_sql.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/salesforce_to_s3.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/sftp_to_s3.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/transfers/sql_to_s3.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/README.md (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/athena.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/base.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/batch.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/bedrock.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/comprehend.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/ec2.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/ecs.py (96%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/eks.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/emr.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/glue.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/glue_crawler.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/glue_databrew.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/kinesis_analytics.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/lambda_function.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/neptune.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/opensearch_serverless.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/rds.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/redshift_cluster.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/redshift_data.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/s3.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/sagemaker.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/sqs.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/triggers/step_function.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/utils/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/utils/asset_compat_lineage_collector.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/utils/connection_wrapper.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/utils/eks_get_token.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/utils/emailer.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/utils/identifiers.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/utils/mixins.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/utils/openlineage.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/utils/rds.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/utils/redshift.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/utils/sagemaker.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/utils/sqs.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/utils/suppress.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/utils/tags.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/utils/task_log_fetcher.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/utils/waiter.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/utils/waiter_with_logging.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/README.md (100%)
rename {airflow/providers/apache/cassandra/hooks => providers/src/airflow/providers/amazon/aws/waiters}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/appflow.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/athena.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/base_waiter.py (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/batch.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/bedrock-agent.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/bedrock.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/comprehend.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/databrew.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/dynamodb.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/ecs.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/eks.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/emr-containers.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/emr-serverless.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/emr.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/glue.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/kinesisanalyticsv2.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/neptune.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/opensearchserverless.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/rds.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/redshift.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/sagemaker.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/aws/waiters/stepfunctions.json (100%)
rename {airflow => providers/src/airflow}/providers/amazon/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/apache/beam/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/apache/beam/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/apache/beam/README.md (100%)
rename {airflow => providers/src/airflow}/providers/apache/beam/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/beam/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/beam/hooks/beam.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/beam/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/beam/operators/beam.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/beam/provider.yaml (100%)
rename {airflow/providers/apache/cassandra/sensors => providers/src/airflow/providers/apache/beam/triggers}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/beam/triggers/beam.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/cassandra/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/apache/cassandra/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/apache/cassandra/__init__.py (100%)
rename {airflow/providers/apache/druid/transfers => providers/src/airflow/providers/apache/cassandra/hooks}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/cassandra/hooks/cassandra.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/cassandra/provider.yaml (100%)
rename {airflow/providers/apache/hdfs/log => providers/src/airflow/providers/apache/cassandra/sensors}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/cassandra/sensors/record.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/cassandra/sensors/table.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/drill/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/apache/drill/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/apache/drill/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/drill/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/drill/hooks/drill.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/drill/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/drill/operators/drill.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/drill/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/apache/druid/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/apache/druid/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/apache/druid/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/druid/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/druid/hooks/druid.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/druid/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/druid/operators/druid.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/druid/operators/druid_check.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/druid/provider.yaml (100%)
rename {airflow/providers/apache/hive => providers/src/airflow/providers/apache/druid}/transfers/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/druid/transfers/hive_to_druid.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/flink/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/apache/flink/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/apache/flink/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/flink/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/flink/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/flink/operators/flink_kubernetes.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/flink/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/apache/flink/sensors/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/flink/sensors/flink_kubernetes.py (100%)
rename {airflow => providers/src/airflow}/providers/apache/hdfs/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/apache/hdfs/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/apache/hdfs/__init__.py (100%)
rename {airflow
=> providers/src/airflow}/providers/apache/hdfs/hooks/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hdfs/hooks/hdfs.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hdfs/hooks/webhdfs.py (100%) rename {airflow/providers/apache/kafka/hooks => providers/src/airflow/providers/apache/hdfs/log}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hdfs/log/hdfs_task_handler.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hdfs/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/apache/hdfs/sensors/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hdfs/sensors/hdfs.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hdfs/sensors/web_hdfs.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/hooks/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/hooks/hive.py (99%) rename {airflow => providers/src/airflow}/providers/apache/hive/macros/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/macros/hive.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/operators/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/operators/hive.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/operators/hive_stats.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/plugins/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/plugins/hive.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/sensors/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/sensors/hive_partition.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/sensors/metastore_partition.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/sensors/named_hive_partition.py (100%) rename {airflow/providers/apache/kafka/operators => providers/src/airflow/providers/apache/hive/transfers}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/transfers/hive_to_mysql.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/transfers/hive_to_samba.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/transfers/mssql_to_hive.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/transfers/mysql_to_hive.py (100%) rename {airflow => providers/src/airflow}/providers/apache/hive/transfers/s3_to_hive.py (97%) rename {airflow => providers/src/airflow}/providers/apache/hive/transfers/vertica_to_hive.py (100%) rename {airflow => providers/src/airflow}/providers/apache/iceberg/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/apache/iceberg/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/apache/iceberg/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/iceberg/hooks/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/iceberg/hooks/iceberg.py (100%) rename {airflow => 
providers/src/airflow}/providers/apache/iceberg/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/apache/impala/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/apache/impala/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/apache/impala/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/impala/hooks/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/impala/hooks/impala.py (100%) rename {airflow => providers/src/airflow}/providers/apache/impala/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/apache/kafka/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/apache/kafka/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/apache/kafka/__init__.py (100%) rename {airflow/providers/apache/kafka/sensors => providers/src/airflow/providers/apache/kafka/hooks}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/kafka/hooks/base.py (100%) rename {airflow => providers/src/airflow}/providers/apache/kafka/hooks/client.py (100%) rename {airflow => providers/src/airflow}/providers/apache/kafka/hooks/consume.py (100%) rename {airflow => providers/src/airflow}/providers/apache/kafka/hooks/produce.py (100%) rename {airflow/providers/apache/kafka/triggers => providers/src/airflow/providers/apache/kafka/operators}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/kafka/operators/consume.py (100%) rename {airflow => providers/src/airflow}/providers/apache/kafka/operators/produce.py (100%) rename {airflow => providers/src/airflow}/providers/apache/kafka/provider.yaml (100%) rename {airflow/providers/apache/kylin/hooks => providers/src/airflow/providers/apache/kafka/sensors}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/kafka/sensors/kafka.py (100%) rename {airflow/providers/apache/kylin/operators => providers/src/airflow/providers/apache/kafka/triggers}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/kafka/triggers/await_message.py (100%) rename {airflow => providers/src/airflow}/providers/apache/kylin/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/apache/kylin/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/apache/kylin/__init__.py (100%) rename {airflow/providers/apache/livy => providers/src/airflow/providers/apache/kylin}/hooks/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/kylin/hooks/kylin.py (100%) rename {airflow/providers/apache/livy => providers/src/airflow/providers/apache/kylin}/operators/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/kylin/operators/kylin_cube.py (100%) rename {airflow => providers/src/airflow}/providers/apache/kylin/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/apache/livy/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/apache/livy/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/apache/livy/__init__.py (100%) rename {airflow/providers/apache/livy/sensors => providers/src/airflow/providers/apache/livy/hooks}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/livy/hooks/livy.py (100%) rename {airflow/providers/apache/livy/triggers => providers/src/airflow/providers/apache/livy/operators}/__init__.py (100%) rename 
{airflow => providers/src/airflow}/providers/apache/livy/operators/livy.py (100%) rename {airflow => providers/src/airflow}/providers/apache/livy/provider.yaml (100%) rename {airflow/providers/arangodb/example_dags => providers/src/airflow/providers/apache/livy/sensors}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/livy/sensors/livy.py (100%) rename {airflow/providers/arangodb/hooks => providers/src/airflow/providers/apache/livy/triggers}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/livy/triggers/livy.py (100%) rename {airflow => providers/src/airflow}/providers/apache/pig/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/apache/pig/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/apache/pig/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/pig/hooks/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/pig/hooks/pig.py (100%) rename {airflow => providers/src/airflow}/providers/apache/pig/operators/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/pig/operators/pig.py (100%) rename {airflow => providers/src/airflow}/providers/apache/pig/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/apache/pinot/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/apache/pinot/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/apache/pinot/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/pinot/hooks/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/pinot/hooks/pinot.py (100%) rename {airflow => providers/src/airflow}/providers/apache/pinot/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/apache/spark/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/apache/spark/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/apache/spark/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/spark/decorators/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/spark/decorators/pyspark.py (100%) rename {airflow => providers/src/airflow}/providers/apache/spark/hooks/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/spark/hooks/spark_connect.py (100%) rename {airflow => providers/src/airflow}/providers/apache/spark/hooks/spark_jdbc.py (100%) rename {airflow => providers/src/airflow}/providers/apache/spark/hooks/spark_jdbc_script.py (100%) rename {airflow => providers/src/airflow}/providers/apache/spark/hooks/spark_sql.py (100%) rename {airflow => providers/src/airflow}/providers/apache/spark/hooks/spark_submit.py (100%) rename {airflow => providers/src/airflow}/providers/apache/spark/operators/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apache/spark/operators/spark_jdbc.py (100%) rename {airflow => providers/src/airflow}/providers/apache/spark/operators/spark_sql.py (100%) rename {airflow => providers/src/airflow}/providers/apache/spark/operators/spark_submit.py (100%) rename {airflow => providers/src/airflow}/providers/apache/spark/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/apprise/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/apprise/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/apprise/__init__.py 
(100%) rename {airflow => providers/src/airflow}/providers/apprise/hooks/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apprise/hooks/apprise.py (100%) rename {airflow => providers/src/airflow}/providers/apprise/notifications/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/apprise/notifications/apprise.py (100%) rename {airflow => providers/src/airflow}/providers/apprise/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/arangodb/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/arangodb/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/arangodb/__init__.py (100%) rename {airflow/providers/arangodb/operators => providers/src/airflow/providers/arangodb/example_dags}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/arangodb/example_dags/example_arangodb.py (100%) rename {airflow/providers/arangodb/sensors => providers/src/airflow/providers/arangodb/hooks}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/arangodb/hooks/arangodb.py (100%) rename {airflow/providers/atlassian => providers/src/airflow/providers/arangodb/operators}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/arangodb/operators/arangodb.py (100%) rename {airflow => providers/src/airflow}/providers/arangodb/provider.yaml (100%) rename {airflow/providers/atlassian/jira/hooks => providers/src/airflow/providers/arangodb/sensors}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/arangodb/sensors/arangodb.py (100%) rename {airflow => providers/src/airflow}/providers/asana/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/asana/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/asana/README.md (100%) rename {airflow => providers/src/airflow}/providers/asana/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/asana/hooks/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/asana/hooks/asana.py (100%) rename {airflow => providers/src/airflow}/providers/asana/operators/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/asana/operators/asana_tasks.py (100%) rename {airflow => providers/src/airflow}/providers/asana/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/atlassian/jira/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/atlassian/jira/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/atlassian/jira/__init__.py (100%) rename {airflow/providers/atlassian/jira/notifications => providers/src/airflow/providers/atlassian/jira/hooks}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/atlassian/jira/hooks/jira.py (100%) rename {airflow/providers/atlassian/jira/operators => providers/src/airflow/providers/atlassian/jira/notifications}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/atlassian/jira/notifications/jira.py (100%) rename {airflow/providers/atlassian/jira/sensors => providers/src/airflow/providers/atlassian/jira/operators}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/atlassian/jira/operators/jira.py (100%) rename {airflow => providers/src/airflow}/providers/atlassian/jira/provider.yaml (100%) rename {airflow/providers/celery/cli => providers/src/airflow/providers/atlassian/jira/sensors}/__init__.py (100%) rename {airflow => 
providers/src/airflow}/providers/atlassian/jira/sensors/jira.py (100%) rename {airflow => providers/src/airflow}/providers/celery/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/celery/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/celery/__init__.py (100%) rename {airflow/providers/cncf/kubernetes/backcompat => providers/src/airflow/providers/celery/cli}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/celery/cli/celery_command.py (100%) rename {airflow => providers/src/airflow}/providers/celery/executors/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/celery/executors/celery_executor.py (100%) rename {airflow => providers/src/airflow}/providers/celery/executors/celery_executor_utils.py (100%) rename {airflow => providers/src/airflow}/providers/celery/executors/celery_kubernetes_executor.py (100%) rename {airflow => providers/src/airflow}/providers/celery/executors/default_celery.py (100%) rename {airflow => providers/src/airflow}/providers/celery/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/celery/sensors/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/celery/sensors/celery_queue.py (100%) rename {airflow => providers/src/airflow}/providers/cloudant/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/cloudant/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/cloudant/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/cloudant/cloudant_fake.py (100%) rename {airflow => providers/src/airflow}/providers/cloudant/hooks/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/cloudant/hooks/cloudant.py (100%) rename {airflow => providers/src/airflow}/providers/cloudant/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/__init__.py (100%) rename {airflow/providers/cncf/kubernetes/cli => providers/src/airflow/providers/cncf/kubernetes/backcompat}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/callbacks.py (100%) rename {airflow/providers/cncf/kubernetes/hooks => providers/src/airflow/providers/cncf/kubernetes/cli}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/cli/kubernetes_command.py (100%) rename {airflow/providers/cncf => providers/src/airflow/providers/cncf/kubernetes/decorators}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/decorators/kubernetes.py (100%) rename {airflow/providers/cncf/kubernetes/decorators => providers/src/airflow/providers/cncf/kubernetes/executors}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/executors/kubernetes_executor.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/executors/kubernetes_executor_types.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/executors/local_kubernetes_executor.py (100%) rename 
{airflow/providers/cncf/kubernetes/kubernetes_executor_templates => providers/src/airflow/providers/cncf/kubernetes/hooks}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/hooks/kubernetes.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/k8s_model.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/kube_client.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/kube_config.py (100%) rename {airflow/providers/cncf/kubernetes/pod_template_file_examples => providers/src/airflow/providers/cncf/kubernetes/kubernetes_executor_templates}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/kubernetes_helper_functions.py (100%) rename {airflow/providers/cncf/kubernetes/executors => providers/src/airflow/providers/cncf/kubernetes/operators}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/operators/custom_object_launcher.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/operators/job.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/operators/kubernetes_pod.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/operators/pod.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/operators/resource.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/operators/spark_kubernetes.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/pod_generator.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/pod_generator_deprecated.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/pod_launcher_deprecated.py (100%) rename {airflow/providers/cncf/kubernetes/resource_convert => providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/python_kubernetes_script.jinja2 (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/python_kubernetes_script.py (100%) rename {airflow/providers/cncf/kubernetes/sensors => providers/src/airflow/providers/cncf/kubernetes/resource_convert}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/resource_convert/configmap.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/resource_convert/env_variable.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/resource_convert/secret.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/secret.py (100%) rename {airflow/providers/cncf/kubernetes/triggers => providers/src/airflow/providers/cncf/kubernetes/sensors}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/sensors/spark_kubernetes.py (100%) 
rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/template_rendering.py (100%) rename {airflow/providers/cohere/operators => providers/src/airflow/providers/cncf/kubernetes/triggers}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/triggers/job.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/triggers/kubernetes_pod.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/triggers/pod.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/utils/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/utils/delete_from.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/utils/k8s_resource_iterator.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/utils/pod_manager.py (100%) rename {airflow => providers/src/airflow}/providers/cncf/kubernetes/utils/xcom_sidecar.py (100%) rename {airflow => providers/src/airflow}/providers/cohere/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/cohere/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/cohere/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/cohere/hooks/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/cohere/hooks/cohere.py (100%) rename {airflow/providers/common => providers/src/airflow/providers/cohere/operators}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/cohere/operators/embedding.py (100%) rename {airflow => providers/src/airflow}/providers/cohere/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/common/compat/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/common/compat/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/common/compat/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/common/compat/assets/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/common/compat/lineage/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/common/compat/lineage/hook.py (100%) rename {airflow => providers/src/airflow}/providers/common/compat/openlineage/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/common/compat/openlineage/facet.py (100%) rename {airflow => providers/src/airflow}/providers/common/compat/openlineage/utils/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/common/compat/openlineage/utils/utils.py (100%) rename {airflow => providers/src/airflow}/providers/common/compat/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/common/compat/security/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/common/compat/security/permissions.py (100%) rename {airflow => providers/src/airflow}/providers/common/io/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/common/io/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/common/io/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/common/io/assets/__init__.py (100%) rename {airflow/providers/common/io/operators => providers/src/airflow/providers/common/io/assets/assets}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/common/io/assets/file.py (100%) rename {airflow/providers/common/sql/hooks => 
providers/src/airflow/providers/common/io/operators}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/common/io/operators/file_transfer.py (100%) rename {airflow => providers/src/airflow}/providers/common/io/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/common/io/xcom/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/common/io/xcom/backend.py (100%) rename {airflow => providers/src/airflow}/providers/common/sql/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/common/sql/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/common/sql/README_API.md (100%) rename {airflow => providers/src/airflow}/providers/common/sql/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/common/sql/doc/adr/0001-record-architecture-decisions.md (100%) rename {airflow => providers/src/airflow}/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md (100%) rename {airflow/providers/common/sql/operators => providers/src/airflow/providers/common/sql/hooks}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/common/sql/hooks/sql.py (99%) rename {airflow => providers/src/airflow}/providers/common/sql/hooks/sql.pyi (92%) rename {airflow/providers/common/sql/sensors => providers/src/airflow/providers/common/sql/operators}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/common/sql/operators/sql.py (100%) rename {airflow => providers/src/airflow}/providers/common/sql/operators/sql.pyi (92%) rename {airflow => providers/src/airflow}/providers/common/sql/provider.yaml (100%) rename {airflow/providers/databricks/plugins => providers/src/airflow/providers/common/sql/sensors}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/common/sql/sensors/sql.py (100%) rename {airflow => providers/src/airflow}/providers/common/sql/sensors/sql.pyi (91%) rename {airflow => providers/src/airflow}/providers/databricks/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/databricks/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/databricks/__init__.py (100%) rename {airflow/providers/cncf/kubernetes/operators => providers/src/airflow/providers/databricks/hooks}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/databricks/hooks/databricks.py (100%) rename {airflow => providers/src/airflow}/providers/databricks/hooks/databricks_base.py (100%) rename {airflow => providers/src/airflow}/providers/databricks/hooks/databricks_sql.py (100%) rename {airflow/providers/databricks/hooks => providers/src/airflow/providers/databricks/operators}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/databricks/operators/databricks.py (100%) rename {airflow => providers/src/airflow}/providers/databricks/operators/databricks_repos.py (100%) rename {airflow => providers/src/airflow}/providers/databricks/operators/databricks_sql.py (100%) rename {airflow => providers/src/airflow}/providers/databricks/operators/databricks_workflow.py (100%) rename {airflow/providers/databricks/sensors => providers/src/airflow/providers/databricks/plugins}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/databricks/plugins/databricks_workflow.py (100%) rename {airflow => providers/src/airflow}/providers/databricks/provider.yaml (100%) rename {airflow/providers/databricks/utils => 
providers/src/airflow/providers/databricks/sensors}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/databricks/sensors/databricks_partition.py (100%) rename {airflow => providers/src/airflow}/providers/databricks/sensors/databricks_sql.py (100%) rename {airflow/providers/databricks/operators => providers/src/airflow/providers/databricks/triggers}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/databricks/triggers/databricks.py (100%) rename {airflow/providers/dbt => providers/src/airflow/providers/databricks/utils}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/databricks/utils/databricks.py (100%) rename {airflow => providers/src/airflow}/providers/datadog/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/datadog/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/datadog/__init__.py (100%) rename {airflow/providers/databricks/triggers => providers/src/airflow/providers/datadog/hooks}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/datadog/hooks/datadog.py (100%) rename {airflow => providers/src/airflow}/providers/datadog/provider.yaml (100%) rename {airflow/providers/datadog/hooks => providers/src/airflow/providers/datadog/sensors}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/datadog/sensors/datadog.py (100%) rename {airflow => providers/src/airflow}/providers/dbt/cloud/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/dbt/cloud/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/dbt/cloud/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/dbt/cloud/hooks/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/dbt/cloud/hooks/dbt.py (99%) rename {airflow => providers/src/airflow}/providers/dbt/cloud/operators/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/dbt/cloud/operators/dbt.py (100%) rename {airflow => providers/src/airflow}/providers/dbt/cloud/provider.yaml (100%) rename {airflow/providers/datadog => providers/src/airflow/providers/dbt/cloud}/sensors/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/dbt/cloud/sensors/dbt.py (100%) rename {airflow => providers/src/airflow}/providers/dbt/cloud/triggers/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/dbt/cloud/triggers/dbt.py (100%) rename {airflow => providers/src/airflow}/providers/dbt/cloud/utils/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/dbt/cloud/utils/openlineage.py (100%) rename {airflow => providers/src/airflow}/providers/dingding/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/dingding/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/dingding/__init__.py (100%) rename {airflow/providers/dbt/cloud/sensors => providers/src/airflow/providers/dingding/hooks}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/dingding/hooks/dingding.py (100%) rename {airflow/providers/dingding/hooks => providers/src/airflow/providers/dingding/operators}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/dingding/operators/dingding.py (100%) rename {airflow => providers/src/airflow}/providers/dingding/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/discord/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/discord/CHANGELOG.rst 
(100%) rename {airflow => providers/src/airflow}/providers/discord/__init__.py (100%) rename {airflow/providers/dingding/operators => providers/src/airflow/providers/discord/hooks}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/discord/hooks/discord_webhook.py (100%) rename {airflow/providers/discord/hooks => providers/src/airflow/providers/discord/notifications}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/discord/notifications/discord.py (100%) rename {airflow/providers/discord/notifications => providers/src/airflow/providers/discord/operators}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/discord/operators/discord_webhook.py (100%) rename {airflow => providers/src/airflow}/providers/discord/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/docker/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/docker/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/docker/__init__.py (100%) rename {airflow/providers/discord/operators => providers/src/airflow/providers/docker/decorators}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/docker/decorators/docker.py (100%) rename {airflow => providers/src/airflow}/providers/docker/exceptions.py (100%) rename {airflow/providers/docker/decorators => providers/src/airflow/providers/docker/hooks}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/docker/hooks/docker.py (100%) rename {airflow/providers/docker/hooks => providers/src/airflow/providers/docker/operators}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/docker/operators/docker.py (100%) rename {airflow => providers/src/airflow}/providers/docker/operators/docker_swarm.py (100%) rename {airflow => providers/src/airflow}/providers/docker/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/edge/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/edge/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/edge/api_endpoints/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/edge/api_endpoints/health_endpoint.py (100%) rename {airflow => providers/src/airflow}/providers/edge/api_endpoints/rpc_api_endpoint.py (100%) rename {airflow => providers/src/airflow}/providers/edge/cli/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/edge/cli/edge_command.py (100%) rename {airflow => providers/src/airflow}/providers/edge/example_dags/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/edge/example_dags/integration_test.py (100%) rename {airflow => providers/src/airflow}/providers/edge/models/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/edge/models/edge_job.py (100%) rename {airflow => providers/src/airflow}/providers/edge/models/edge_logs.py (100%) rename {airflow => providers/src/airflow}/providers/edge/models/edge_worker.py (100%) rename {airflow => providers/src/airflow}/providers/edge/openapi/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/edge/openapi/edge_worker_api_v1.yaml (100%) rename {airflow => providers/src/airflow}/providers/edge/plugins/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/edge/plugins/edge_executor_plugin.py (100%) rename {airflow => providers/src/airflow}/providers/edge/plugins/templates/edge_worker_hosts.html (100%) rename {airflow => 
providers/src/airflow}/providers/edge/plugins/templates/edge_worker_jobs.html (100%) rename {airflow => providers/src/airflow}/providers/edge/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/elasticsearch/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/elasticsearch/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/elasticsearch/__init__.py (100%) rename {airflow/providers/docker/operators => providers/src/airflow/providers/elasticsearch/hooks}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/elasticsearch/hooks/elasticsearch.py (100%) rename {airflow => providers/src/airflow}/providers/elasticsearch/log/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/elasticsearch/log/es_json_formatter.py (100%) rename {airflow => providers/src/airflow}/providers/elasticsearch/log/es_response.py (100%) rename {airflow => providers/src/airflow}/providers/elasticsearch/log/es_task_handler.py (100%) rename {airflow => providers/src/airflow}/providers/elasticsearch/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/exasol/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/exasol/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/exasol/__init__.py (100%) rename {airflow/providers/elasticsearch => providers/src/airflow/providers/exasol}/hooks/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/exasol/hooks/exasol.py (100%) rename {airflow/providers/exasol/hooks => providers/src/airflow/providers/exasol/operators}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/exasol/operators/exasol.py (100%) rename {airflow => providers/src/airflow}/providers/exasol/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/fab/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/fab/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/fab/alembic.ini (100%) rename {airflow/providers/exasol/operators => providers/src/airflow/providers/fab/auth_manager}/__init__.py (100%) rename {airflow/providers/fab/auth_manager => providers/src/airflow/providers/fab/auth_manager/api}/__init__.py (100%) rename {airflow/providers/fab/auth_manager/api => providers/src/airflow/providers/fab/auth_manager/api/auth}/__init__.py (100%) rename {airflow/providers/fab/auth_manager/api/auth => providers/src/airflow/providers/fab/auth_manager/api/auth/backend}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/api/auth/backend/basic_auth.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/api/auth/backend/kerberos_auth.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/api_endpoints/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/api_endpoints/role_and_permission_endpoint.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/api_endpoints/user_endpoint.py (100%) rename {airflow/providers/fab/auth_manager/api/auth/backend => providers/src/airflow/providers/fab/auth_manager/cli_commands}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/cli_commands/db_command.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/cli_commands/definition.py (100%) rename {airflow => 
providers/src/airflow}/providers/fab/auth_manager/cli_commands/role_command.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/cli_commands/sync_perm_command.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/cli_commands/user_command.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/cli_commands/utils.py (100%) rename {airflow/providers/fab/auth_manager/cli_commands => providers/src/airflow/providers/fab/auth_manager/decorators}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/decorators/auth.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/fab_auth_manager.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/models/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/models/anonymous_user.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/models/db.py (94%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/openapi/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/openapi/v1.yaml (100%) rename {airflow/providers/fab/auth_manager/decorators => providers/src/airflow/providers/fab/auth_manager/security_manager}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/security_manager/constants.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/security_manager/override.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/views/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/views/permissions.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/views/roles_list.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/views/user.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/views/user_edit.py (100%) rename {airflow => providers/src/airflow}/providers/fab/auth_manager/views/user_stats.py (100%) rename {airflow => providers/src/airflow}/providers/fab/migrations/README (100%) rename {airflow => providers/src/airflow}/providers/fab/migrations/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/fab/migrations/env.py (100%) rename {airflow => providers/src/airflow}/providers/fab/migrations/script.py.mako (100%) rename {airflow => providers/src/airflow}/providers/fab/migrations/versions/0001_1_4_0_placeholder_migration.py (100%) rename {airflow => providers/src/airflow}/providers/fab/migrations/versions/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/fab/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/facebook/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/facebook/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/facebook/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/facebook/ads/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/facebook/ads/hooks/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/facebook/ads/hooks/ads.py (100%) rename {airflow => providers/src/airflow}/providers/facebook/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/ftp/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/ftp/CHANGELOG.rst (100%) rename {airflow => 
providers/src/airflow}/providers/ftp/__init__.py (100%) rename {airflow/providers/fab/auth_manager/security_manager => providers/src/airflow/providers/ftp/hooks}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/ftp/hooks/ftp.py (100%) rename {airflow => providers/src/airflow}/providers/ftp/operators/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/ftp/operators/ftp.py (100%) rename {airflow => providers/src/airflow}/providers/ftp/provider.yaml (100%) rename {airflow/providers/ftp/hooks => providers/src/airflow/providers/ftp/sensors}/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/ftp/sensors/ftp.py (100%) rename {airflow => providers/src/airflow}/providers/github/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/github/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/github/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/github/hooks/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/github/hooks/github.py (100%) rename {airflow => providers/src/airflow}/providers/github/operators/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/github/operators/github.py (100%) rename {airflow => providers/src/airflow}/providers/github/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/github/sensors/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/github/sensors/github.py (100%) rename {airflow => providers/src/airflow}/providers/google/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/google/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/google/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/google/ads/.gitignore (100%) rename {airflow => providers/src/airflow}/providers/google/ads/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/google/ads/hooks/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/google/ads/hooks/ads.py (100%) rename {airflow => providers/src/airflow}/providers/google/ads/operators/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/google/ads/operators/ads.py (100%) rename {airflow => providers/src/airflow}/providers/google/ads/transfers/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/google/ads/transfers/ads_to_gcs.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/_internal_client/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/_internal_client/secret_manager_client.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/example_dags/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/example_dags/example_cloud_task.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/example_dags/example_looker.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/example_dags/example_presto_to_gcs.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/example_dags/example_salesforce_to_gcs.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/fs/__init__.py (100%) rename {airflow => 
providers/src/airflow}/providers/google/cloud/fs/gcs.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/automl.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/bigquery.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/bigquery_dts.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/bigtable.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/cloud_batch.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/cloud_build.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/cloud_composer.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/cloud_memorystore.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/cloud_run.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/cloud_sql.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/cloud_storage_transfer_service.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/compute.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/compute_ssh.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/datacatalog.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/dataflow.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/dataform.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/datafusion.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/datapipeline.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/dataplex.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/dataprep.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/dataproc.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/dataproc_metastore.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/datastore.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/dlp.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/functions.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/gcs.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/gdm.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/kms.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/kubernetes_engine.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/life_sciences.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/looker.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/mlengine.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/natural_language.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/os_login.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/pubsub.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/secret_manager.py (100%) rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/spanner.py 
(100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/speech_to_text.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/stackdriver.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/tasks.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/text_to_speech.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/translate.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/vertex_ai/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/vertex_ai/auto_ml.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/vertex_ai/custom_job.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/vertex_ai/dataset.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/vertex_ai/endpoint_service.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/vertex_ai/generative_model.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/vertex_ai/model_service.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/vertex_ai/pipeline_job.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/vertex_ai/prediction_service.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/video_intelligence.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/vision.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/hooks/workflows.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/automl.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/base.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/bigquery.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/bigquery_dts.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/bigtable.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/cloud_build.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/cloud_functions.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/cloud_memorystore.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/cloud_sql.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/cloud_storage_transfer.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/cloud_tasks.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/compute.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/data_loss_prevention.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/datacatalog.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/dataflow.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/dataform.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/datafusion.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/dataplex.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/dataprep.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/dataproc.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/datastore.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/kubernetes_engine.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/life_sciences.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/mlengine.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/pubsub.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/spanner.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/stackdriver.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/translate.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/vertex_ai.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/links/workflows.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/log/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/log/gcs_task_handler.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/log/stackdriver_task_handler.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/openlineage/BigQueryErrorRunFacet.json (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/openlineage/BigQueryJobRunFacet.json (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/openlineage/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/openlineage/mixins.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/openlineage/utils.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/automl.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/bigquery.py (99%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/bigquery_dts.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/bigtable.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/cloud_base.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/cloud_batch.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/cloud_build.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/cloud_composer.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/cloud_memorystore.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/cloud_run.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/cloud_sql.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/cloud_storage_transfer_service.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/compute.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/datacatalog.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/dataflow.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/dataform.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/datafusion.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/datapipeline.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/dataplex.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/dataprep.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/dataproc.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/dataproc_metastore.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/datastore.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/dlp.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/functions.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/kubernetes_engine.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/life_sciences.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/looker.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/mlengine.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/natural_language.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/pubsub.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/spanner.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/speech_to_text.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/stackdriver.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/tasks.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/text_to_speech.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/translate.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/translate_speech.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/vertex_ai/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/vertex_ai/auto_ml.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/vertex_ai/custom_job.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/vertex_ai/dataset.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/vertex_ai/endpoint_service.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/vertex_ai/generative_model.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/vertex_ai/model_service.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/vertex_ai/pipeline_job.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/video_intelligence.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/vision.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/operators/workflows.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/secrets/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/secrets/secret_manager.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/sensors/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/sensors/bigquery.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/sensors/bigquery_dts.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/sensors/bigtable.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/sensors/cloud_composer.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/sensors/cloud_storage_transfer_service.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/sensors/dataflow.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/sensors/dataform.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/sensors/datafusion.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/sensors/dataplex.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/sensors/dataprep.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/sensors/dataproc.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/sensors/dataproc_metastore.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/sensors/gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/sensors/looker.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/sensors/pubsub.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/sensors/tasks.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/sensors/workflows.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/adls_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/azure_blob_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/azure_fileshare_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/bigquery_to_bigquery.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/bigquery_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/bigquery_to_mssql.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/bigquery_to_mysql.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/bigquery_to_postgres.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/bigquery_to_sql.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/calendar_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/cassandra_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/facebook_ads_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/gcs_to_bigquery.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/gcs_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/gcs_to_local.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/gcs_to_sftp.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/gdrive_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/gdrive_to_local.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/local_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/mssql_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/mysql_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/oracle_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/postgres_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/presto_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/s3_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/salesforce_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/sftp_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/sheets_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/sql_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/transfers/trino_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/triggers/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/triggers/bigquery.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/triggers/bigquery_dts.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/triggers/cloud_batch.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/triggers/cloud_build.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/triggers/cloud_composer.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/triggers/cloud_run.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/triggers/cloud_sql.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/triggers/cloud_storage_transfer_service.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/triggers/dataflow.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/triggers/datafusion.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/triggers/dataplex.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/triggers/dataproc.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/triggers/gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/triggers/kubernetes_engine.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/triggers/mlengine.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/triggers/pubsub.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/triggers/vertex_ai.py (100%)
rename {airflow/providers/ftp/sensors => providers/src/airflow/providers/google/cloud/utils}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/utils/bigquery.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/utils/bigquery_get_data.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/utils/credentials_provider.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/utils/dataform.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/utils/datafusion.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/utils/dataproc.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/utils/external_token_supplier.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/utils/field_sanitizer.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/utils/field_validator.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/utils/helpers.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/utils/mlengine_operator_utils.py (100%)
rename {airflow => providers/src/airflow}/providers/google/cloud/utils/mlengine_prediction_summary.py (100%)
rename {airflow => providers/src/airflow}/providers/google/common/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/common/auth_backend/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/common/auth_backend/google_openid.py (100%)
rename {airflow => providers/src/airflow}/providers/google/common/consts.py (100%)
rename {airflow => providers/src/airflow}/providers/google/common/deprecated.py (100%)
rename {airflow => providers/src/airflow}/providers/google/common/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/common/hooks/base_google.py (100%)
rename {airflow => providers/src/airflow}/providers/google/common/hooks/discovery_api.py (100%)
rename {airflow => providers/src/airflow}/providers/google/common/links/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/common/links/storage.py (100%)
rename {airflow => providers/src/airflow}/providers/google/common/utils/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/common/utils/id_token_credentials.py (100%)
rename {airflow => providers/src/airflow}/providers/google/datasets/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/datasets/bigquery.py (100%)
rename {airflow => providers/src/airflow}/providers/google/firebase/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/firebase/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/firebase/hooks/firestore.py (100%)
rename {airflow => providers/src/airflow}/providers/google/firebase/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/firebase/operators/firestore.py (100%)
rename {airflow => providers/src/airflow}/providers/google/go_module_utils.py (100%)
rename {airflow => providers/src/airflow}/providers/google/leveldb/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/leveldb/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/leveldb/hooks/leveldb.py (100%)
rename {airflow => providers/src/airflow}/providers/google/leveldb/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/leveldb/operators/leveldb.py (100%)
rename {airflow => providers/src/airflow}/providers/google/marketing_platform/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/marketing_platform/example_dags/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/marketing_platform/example_dags/example_display_video.py (100%)
rename {airflow => providers/src/airflow}/providers/google/marketing_platform/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/marketing_platform/hooks/analytics.py (100%)
rename {airflow => providers/src/airflow}/providers/google/marketing_platform/hooks/analytics_admin.py (100%)
rename {airflow => providers/src/airflow}/providers/google/marketing_platform/hooks/campaign_manager.py (100%)
rename {airflow => providers/src/airflow}/providers/google/marketing_platform/hooks/display_video.py (100%)
rename {airflow => providers/src/airflow}/providers/google/marketing_platform/hooks/search_ads.py (100%)
rename {airflow => providers/src/airflow}/providers/google/marketing_platform/links/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/marketing_platform/links/analytics_admin.py (100%)
rename {airflow => providers/src/airflow}/providers/google/marketing_platform/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/marketing_platform/operators/analytics.py (100%)
rename {airflow => providers/src/airflow}/providers/google/marketing_platform/operators/analytics_admin.py (100%)
rename {airflow => providers/src/airflow}/providers/google/marketing_platform/operators/campaign_manager.py (100%)
rename {airflow => providers/src/airflow}/providers/google/marketing_platform/operators/display_video.py (100%)
rename {airflow => providers/src/airflow}/providers/google/marketing_platform/operators/search_ads.py (100%)
rename {airflow => providers/src/airflow}/providers/google/marketing_platform/sensors/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/marketing_platform/sensors/campaign_manager.py (100%)
rename {airflow => providers/src/airflow}/providers/google/marketing_platform/sensors/display_video.py (100%)
rename {airflow => providers/src/airflow}/providers/google/provider.yaml (100%)
rename {airflow/providers/google/cloud/utils => providers/src/airflow/providers/google/suite}/__init__.py (100%)
rename {airflow/providers/google/suite => providers/src/airflow/providers/google/suite/hooks}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/suite/hooks/calendar.py (100%)
rename {airflow => providers/src/airflow}/providers/google/suite/hooks/drive.py (100%)
rename {airflow => providers/src/airflow}/providers/google/suite/hooks/sheets.py (100%)
rename {airflow/providers/google/suite/hooks => providers/src/airflow/providers/google/suite/operators}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/suite/operators/sheets.py (100%)
rename {airflow => providers/src/airflow}/providers/google/suite/sensors/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/suite/sensors/drive.py (100%)
rename {airflow => providers/src/airflow}/providers/google/suite/transfers/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/google/suite/transfers/gcs_to_gdrive.py (100%)
rename {airflow => providers/src/airflow}/providers/google/suite/transfers/gcs_to_sheets.py (100%)
rename {airflow => providers/src/airflow}/providers/google/suite/transfers/local_to_drive.py (100%)
rename {airflow => providers/src/airflow}/providers/google/suite/transfers/sql_to_sheets.py (100%)
rename {airflow => providers/src/airflow}/providers/grpc/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/grpc/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/grpc/__init__.py (100%)
rename {airflow/providers/google/suite/operators => providers/src/airflow/providers/grpc/hooks}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/grpc/hooks/grpc.py (100%)
rename {airflow/providers/grpc/hooks => providers/src/airflow/providers/grpc/operators}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/grpc/operators/grpc.py (100%)
rename {airflow => providers/src/airflow}/providers/grpc/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/hashicorp/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/hashicorp/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/hashicorp/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/hashicorp/_internal_client/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/hashicorp/_internal_client/vault_client.py (100%)
rename {airflow => providers/src/airflow}/providers/hashicorp/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/hashicorp/hooks/vault.py (100%)
rename {airflow => providers/src/airflow}/providers/hashicorp/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/hashicorp/secrets/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/hashicorp/secrets/vault.py (100%)
rename {airflow => providers/src/airflow}/providers/http/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/http/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/http/__init__.py (100%)
rename {airflow/providers/grpc/operators => providers/src/airflow/providers/http/hooks}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/http/hooks/http.py (100%)
rename {airflow/providers/http/hooks => providers/src/airflow/providers/http/operators}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/http/operators/http.py (100%)
rename {airflow => providers/src/airflow}/providers/http/provider.yaml (100%)
rename {airflow/providers/http/operators => providers/src/airflow/providers/http/sensors}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/http/sensors/http.py (100%)
rename {airflow/providers/http/sensors => providers/src/airflow/providers/http/triggers}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/http/triggers/http.py (100%)
rename {airflow => providers/src/airflow}/providers/imap/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/imap/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/imap/__init__.py (100%)
rename {airflow/providers/http/triggers => providers/src/airflow/providers/imap/hooks}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/imap/hooks/imap.py (100%)
rename {airflow => providers/src/airflow}/providers/imap/provider.yaml (100%)
rename {airflow/providers/imap/hooks => providers/src/airflow/providers/imap/sensors}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/imap/sensors/imap_attachment.py (100%)
rename {airflow => providers/src/airflow}/providers/influxdb/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/influxdb/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/influxdb/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/influxdb/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/influxdb/hooks/influxdb.py (100%)
rename {airflow => providers/src/airflow}/providers/influxdb/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/influxdb/operators/influxdb.py (100%)
rename {airflow => providers/src/airflow}/providers/influxdb/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/jdbc/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/jdbc/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/jdbc/__init__.py (100%)
rename {airflow/providers/imap/sensors => providers/src/airflow/providers/jdbc/hooks}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/jdbc/hooks/jdbc.py (100%)
rename {airflow/providers/jdbc/hooks => providers/src/airflow/providers/jdbc/operators}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/jdbc/operators/jdbc.py (100%)
rename {airflow => providers/src/airflow}/providers/jdbc/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/jenkins/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/jenkins/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/jenkins/__init__.py (100%)
rename {airflow/providers/jdbc/operators => providers/src/airflow/providers/jenkins/hooks}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/jenkins/hooks/jenkins.py (100%)
rename {airflow/providers/jenkins/hooks => providers/src/airflow/providers/jenkins/operators}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/jenkins/operators/jenkins_job_trigger.py (100%)
rename {airflow => providers/src/airflow}/providers/jenkins/provider.yaml (100%)
rename {airflow/providers/jenkins/operators => providers/src/airflow/providers/jenkins/sensors}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/jenkins/sensors/jenkins.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/fs/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/fs/adls.py (100%)
rename {airflow/providers/jenkins/sensors => providers/src/airflow/providers/microsoft/azure/hooks}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/hooks/adx.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/hooks/asb.py (96%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/hooks/base_azure.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/hooks/batch.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/hooks/container_instance.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/hooks/container_registry.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/hooks/container_volume.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/hooks/cosmos.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/hooks/data_factory.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/hooks/data_lake.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/hooks/fileshare.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/hooks/msgraph.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/hooks/powerbi.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/hooks/synapse.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/hooks/wasb.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/log/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/log/wasb_task_handler.py (100%)
rename {airflow/providers/microsoft => providers/src/airflow/providers/microsoft/azure/operators}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/operators/adls.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/operators/adx.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/operators/asb.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/operators/batch.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/operators/container_instances.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/operators/cosmos.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/operators/data_factory.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/operators/msgraph.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/operators/powerbi.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/operators/synapse.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/operators/wasb_delete_blob.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/secrets/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/secrets/key_vault.py (100%)
rename {airflow/providers/microsoft/azure/hooks => providers/src/airflow/providers/microsoft/azure/sensors}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/sensors/cosmos.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/sensors/data_factory.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/sensors/msgraph.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/sensors/wasb.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/transfers/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/transfers/azure_blob_to_gcs.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/transfers/local_to_adls.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/transfers/local_to_wasb.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/transfers/s3_to_wasb.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/transfers/sftp_to_wasb.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/triggers/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/triggers/data_factory.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/triggers/msgraph.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/triggers/powerbi.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/triggers/wasb.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/azure/utils.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/mssql/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/mssql/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/mssql/__init__.py (100%)
rename {airflow/providers/microsoft/azure/operators => providers/src/airflow/providers/microsoft/mssql/hooks}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/mssql/hooks/mssql.py (100%)
rename {airflow/providers/microsoft/azure/sensors => providers/src/airflow/providers/microsoft/mssql/operators}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/mssql/operators/mssql.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/mssql/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/psrp/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/psrp/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/psrp/__init__.py (100%)
rename {airflow/providers/microsoft/mssql => providers/src/airflow/providers/microsoft/psrp}/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/psrp/hooks/psrp.py (100%)
rename {airflow/providers/microsoft/mssql => providers/src/airflow/providers/microsoft/psrp}/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/psrp/operators/psrp.py (93%)
rename {airflow => providers/src/airflow}/providers/microsoft/psrp/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/winrm/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/winrm/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/winrm/__init__.py (100%)
rename {airflow/providers/microsoft/psrp => providers/src/airflow/providers/microsoft/winrm}/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/winrm/hooks/winrm.py (100%)
rename {airflow/providers/microsoft/psrp => providers/src/airflow/providers/microsoft/winrm}/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/winrm/operators/winrm.py (100%)
rename {airflow => providers/src/airflow}/providers/microsoft/winrm/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/mongo/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/mongo/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/mongo/__init__.py (100%)
rename {airflow/providers/microsoft/winrm => providers/src/airflow/providers/mongo}/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/mongo/hooks/mongo.py (100%)
rename {airflow => providers/src/airflow}/providers/mongo/provider.yaml (100%)
rename {airflow/providers/microsoft/winrm/operators => providers/src/airflow/providers/mongo/sensors}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/mongo/sensors/mongo.py (100%)
rename {airflow => providers/src/airflow}/providers/mysql/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/mysql/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/mysql/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/mysql/assets/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/mysql/assets/mysql.py (100%)
rename {airflow/providers/mongo => providers/src/airflow/providers/mysql}/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/mysql/hooks/mysql.py (100%)
rename {airflow/providers/mongo/sensors => providers/src/airflow/providers/mysql/operators}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/mysql/operators/mysql.py (100%)
rename {airflow => providers/src/airflow}/providers/mysql/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/mysql/transfers/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/mysql/transfers/presto_to_mysql.py (100%)
rename {airflow => providers/src/airflow}/providers/mysql/transfers/s3_to_mysql.py (100%)
rename {airflow => providers/src/airflow}/providers/mysql/transfers/trino_to_mysql.py (100%)
rename {airflow => providers/src/airflow}/providers/mysql/transfers/vertica_to_mysql.py (100%)
rename {airflow => providers/src/airflow}/providers/neo4j/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/neo4j/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/neo4j/README.md (100%)
rename {airflow => providers/src/airflow}/providers/neo4j/__init__.py (100%)
rename {airflow/providers/mysql => providers/src/airflow/providers/neo4j}/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/neo4j/hooks/neo4j.py (100%)
rename {airflow/providers/mysql => providers/src/airflow/providers/neo4j}/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/neo4j/operators/neo4j.py (100%)
rename {airflow => providers/src/airflow}/providers/neo4j/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/odbc/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/odbc/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/odbc/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/odbc/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/odbc/hooks/odbc.py (100%)
rename {airflow => providers/src/airflow}/providers/odbc/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/openai/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/openai/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/openai/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/openai/exceptions.py (100%)
rename {airflow => providers/src/airflow}/providers/openai/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/openai/hooks/openai.py (100%)
rename {airflow => providers/src/airflow}/providers/openai/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/openai/operators/openai.py (100%)
rename {airflow => providers/src/airflow}/providers/openai/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/openai/triggers/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/openai/triggers/openai.py (100%)
rename {airflow => providers/src/airflow}/providers/openfaas/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/openfaas/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/openfaas/__init__.py (100%)
rename {airflow/providers/neo4j => providers/src/airflow/providers/openfaas}/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/openfaas/hooks/openfaas.py (100%)
rename {airflow => providers/src/airflow}/providers/openfaas/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/conf.py (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/extractors/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/extractors/base.py (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/extractors/bash.py (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/extractors/manager.py (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/extractors/python.py (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/facets/AirflowDagRunFacet.json (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/facets/AirflowDebugRunFacet.json (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/facets/AirflowJobFacet.json (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/facets/AirflowRunFacet.json (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/facets/AirflowStateRunFacet.json (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/facets/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/plugins/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/plugins/adapter.py (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/plugins/facets.py (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/plugins/listener.py (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/plugins/macros.py (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/plugins/openlineage.py (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/sqlparser.py (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/utils/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/utils/asset_compat_lineage_collector.py (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/utils/selective_enable.py (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/utils/sql.py (100%)
rename {airflow => providers/src/airflow}/providers/openlineage/utils/utils.py (100%)
rename {airflow => providers/src/airflow}/providers/opensearch/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/opensearch/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/opensearch/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/opensearch/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/opensearch/hooks/opensearch.py (100%)
rename {airflow => providers/src/airflow}/providers/opensearch/log/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/opensearch/log/os_json_formatter.py (100%)
rename {airflow => providers/src/airflow}/providers/opensearch/log/os_response.py (100%)
rename {airflow => providers/src/airflow}/providers/opensearch/log/os_task_handler.py (100%)
rename {airflow => providers/src/airflow}/providers/opensearch/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/opensearch/operators/opensearch.py (100%)
rename {airflow => providers/src/airflow}/providers/opensearch/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/opsgenie/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/opsgenie/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/opsgenie/__init__.py (100%)
rename {airflow/providers/neo4j/operators => providers/src/airflow/providers/opsgenie/hooks}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/opsgenie/hooks/opsgenie.py (100%)
rename {airflow => providers/src/airflow}/providers/opsgenie/notifications/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/opsgenie/notifications/opsgenie.py (100%)
rename {airflow/providers/openfaas/hooks => providers/src/airflow/providers/opsgenie/operators}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/opsgenie/operators/opsgenie.py (100%)
rename {airflow => providers/src/airflow}/providers/opsgenie/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/opsgenie/typing/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/opsgenie/typing/opsgenie.py (100%)
rename {airflow => providers/src/airflow}/providers/oracle/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/oracle/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/oracle/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/oracle/example_dags/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/oracle/example_dags/example_oracle.py (100%)
rename {airflow/providers/opsgenie => providers/src/airflow/providers/oracle}/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/oracle/hooks/oracle.py (100%)
rename {airflow/providers/opsgenie => providers/src/airflow/providers/oracle}/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/oracle/operators/oracle.py (100%)
rename {airflow => providers/src/airflow}/providers/oracle/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/oracle/transfers/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/oracle/transfers/oracle_to_oracle.py (100%)
rename {airflow => providers/src/airflow}/providers/pagerduty/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/pagerduty/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/pagerduty/__init__.py (100%)
rename {airflow/providers/oracle => providers/src/airflow/providers/pagerduty}/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/pagerduty/hooks/pagerduty.py (100%)
rename {airflow => providers/src/airflow}/providers/pagerduty/hooks/pagerduty_events.py (100%)
rename {airflow => providers/src/airflow}/providers/pagerduty/notifications/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/pagerduty/notifications/pagerduty.py (100%)
rename {airflow => providers/src/airflow}/providers/pagerduty/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/papermill/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/papermill/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/papermill/__init__.py (100%)
rename {airflow/providers/oracle/operators => providers/src/airflow/providers/papermill/hooks}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/papermill/hooks/kernel.py (100%)
rename {airflow/providers/pagerduty/hooks => providers/src/airflow/providers/papermill/operators}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/papermill/operators/papermill.py (100%)
rename {airflow => providers/src/airflow}/providers/papermill/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/pgvector/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/pgvector/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/pgvector/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/pgvector/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/pgvector/hooks/pgvector.py (100%)
rename {airflow => providers/src/airflow}/providers/pgvector/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/pgvector/operators/pgvector.py (100%)
rename {airflow => providers/src/airflow}/providers/pgvector/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/pinecone/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/pinecone/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/pinecone/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/pinecone/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/pinecone/hooks/pinecone.py (100%)
rename {airflow => providers/src/airflow}/providers/pinecone/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/pinecone/operators/pinecone.py (100%)
rename {airflow => providers/src/airflow}/providers/pinecone/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/postgres/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/postgres/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/postgres/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/postgres/assets/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/postgres/assets/postgres.py (100%)
rename {airflow/providers/papermill => providers/src/airflow/providers/postgres}/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/postgres/hooks/postgres.py (100%)
rename {airflow/providers/papermill => providers/src/airflow/providers/postgres}/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/postgres/operators/postgres.py (100%)
rename {airflow => providers/src/airflow}/providers/postgres/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/presto/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/presto/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/presto/__init__.py (100%)
rename {airflow/providers/postgres => providers/src/airflow/providers/presto}/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/presto/hooks/presto.py (100%)
rename {airflow => providers/src/airflow}/providers/presto/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/presto/transfers/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/presto/transfers/gcs_to_presto.py (100%)
rename {airflow => providers/src/airflow}/providers/qdrant/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/qdrant/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/qdrant/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/qdrant/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/qdrant/hooks/qdrant.py (100%)
rename {airflow => providers/src/airflow}/providers/qdrant/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/qdrant/operators/qdrant.py (100%)
rename {airflow => providers/src/airflow}/providers/qdrant/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/redis/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/redis/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/redis/__init__.py (100%)
rename {airflow/providers/postgres/operators => providers/src/airflow/providers/redis/hooks}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/redis/hooks/redis.py (100%)
rename {airflow/providers/presto/hooks => providers/src/airflow/providers/redis/log}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/redis/log/redis_task_handler.py (100%)
rename {airflow/providers/redis/hooks => providers/src/airflow/providers/redis/operators}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/redis/operators/redis_publish.py (100%)
rename {airflow => providers/src/airflow}/providers/redis/provider.yaml (100%)
rename {airflow/providers/redis/log => providers/src/airflow/providers/redis/sensors}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/redis/sensors/redis_key.py (100%)
rename {airflow => providers/src/airflow}/providers/redis/sensors/redis_pub_sub.py (100%)
rename {airflow => providers/src/airflow}/providers/salesforce/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/salesforce/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/salesforce/__init__.py (100%)
rename {airflow/providers/redis/operators => providers/src/airflow/providers/salesforce/hooks}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/salesforce/hooks/salesforce.py (100%)
rename {airflow => providers/src/airflow}/providers/salesforce/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/salesforce/operators/bulk.py (100%)
rename {airflow => providers/src/airflow}/providers/salesforce/operators/salesforce_apex_rest.py (100%)
rename {airflow => providers/src/airflow}/providers/salesforce/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/samba/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/samba/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/samba/__init__.py (100%)
rename {airflow/providers/redis/sensors => providers/src/airflow/providers/samba/hooks}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/samba/hooks/samba.py (100%)
rename {airflow => providers/src/airflow}/providers/samba/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/samba/transfers/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/samba/transfers/gcs_to_samba.py (100%)
rename {airflow => providers/src/airflow}/providers/segment/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/segment/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/segment/__init__.py (100%)
rename {airflow/providers/salesforce => providers/src/airflow/providers/segment}/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/segment/hooks/segment.py (100%)
rename {airflow/providers/samba/hooks => providers/src/airflow/providers/segment/operators}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/segment/operators/segment_track_event.py (100%)
rename {airflow => providers/src/airflow}/providers/segment/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/sendgrid/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/sendgrid/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/sendgrid/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/sendgrid/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/sendgrid/utils/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/sendgrid/utils/emailer.py (100%)
rename {airflow => providers/src/airflow}/providers/sftp/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/sftp/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/sftp/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/sftp/decorators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/sftp/decorators/sensors/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/sftp/decorators/sensors/sftp.py (100%)
rename {airflow => providers/src/airflow}/providers/sftp/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/sftp/hooks/sftp.py (100%)
rename {airflow => providers/src/airflow}/providers/sftp/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/sftp/operators/sftp.py (100%)
rename {airflow => providers/src/airflow}/providers/sftp/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/sftp/sensors/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/sftp/sensors/sftp.py (100%)
rename {airflow => providers/src/airflow}/providers/sftp/triggers/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/sftp/triggers/sftp.py (100%)
rename {airflow => providers/src/airflow}/providers/singularity/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/singularity/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/singularity/__init__.py (100%)
rename {airflow/providers/segment/hooks => providers/src/airflow/providers/singularity/operators}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/singularity/operators/singularity.py (100%)
rename {airflow => providers/src/airflow}/providers/singularity/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/slack/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/slack/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/slack/__init__.py (100%)
rename {airflow/providers/segment/operators => providers/src/airflow/providers/slack/hooks}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/slack/hooks/slack.py (100%)
rename {airflow => providers/src/airflow}/providers/slack/hooks/slack_webhook.py (100%)
rename {airflow => providers/src/airflow}/providers/slack/notifications/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/slack/notifications/slack.py (100%)
rename {airflow => providers/src/airflow}/providers/slack/notifications/slack_notifier.py (100%)
rename {airflow => providers/src/airflow}/providers/slack/notifications/slack_webhook.py (100%)
rename {airflow/providers/singularity => providers/src/airflow/providers/slack}/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/slack/operators/slack.py (100%)
rename {airflow => providers/src/airflow}/providers/slack/operators/slack_webhook.py (100%)
rename {airflow => providers/src/airflow}/providers/slack/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/slack/transfers/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/slack/transfers/base_sql_to_slack.py (100%)
rename {airflow => providers/src/airflow}/providers/slack/transfers/sql_to_slack.py (100%)
rename {airflow => providers/src/airflow}/providers/slack/transfers/sql_to_slack_webhook.py (100%)
rename {airflow => providers/src/airflow}/providers/slack/utils/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/smtp/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/smtp/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/smtp/__init__.py (100%)
rename {airflow/providers/slack => providers/src/airflow/providers/smtp}/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/smtp/hooks/smtp.py (100%)
rename {airflow => providers/src/airflow}/providers/smtp/notifications/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/smtp/notifications/smtp.py (100%)
rename {airflow => providers/src/airflow}/providers/smtp/notifications/templates/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/smtp/notifications/templates/email.html (100%)
rename {airflow => providers/src/airflow}/providers/smtp/notifications/templates/email_subject.jinja2 (100%)
rename {airflow/providers/slack => providers/src/airflow/providers/smtp}/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/smtp/operators/smtp.py (100%)
rename {airflow => providers/src/airflow}/providers/smtp/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/snowflake/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/snowflake/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/snowflake/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/snowflake/decorators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/snowflake/decorators/snowpark.py (100%)
rename {airflow => providers/src/airflow}/providers/snowflake/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/snowflake/hooks/snowflake.py (100%)
rename {airflow => providers/src/airflow}/providers/snowflake/hooks/snowflake_sql_api.py (99%)
rename {airflow => providers/src/airflow}/providers/snowflake/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/snowflake/operators/snowflake.py (100%)
rename {airflow => providers/src/airflow}/providers/snowflake/operators/snowpark.py (100%)
rename {airflow => providers/src/airflow}/providers/snowflake/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/snowflake/transfers/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/snowflake/transfers/copy_into_snowflake.py (100%)
rename {airflow => providers/src/airflow}/providers/snowflake/triggers/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/snowflake/triggers/snowflake_trigger.py (100%)
rename {airflow => providers/src/airflow}/providers/snowflake/utils/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/snowflake/utils/common.py (100%)
rename {airflow => providers/src/airflow}/providers/snowflake/utils/openlineage.py (100%)
rename {airflow => providers/src/airflow}/providers/snowflake/utils/snowpark.py (100%)
rename {airflow => providers/src/airflow}/providers/snowflake/utils/sql_api_generate_jwt.py (100%)
rename {airflow => providers/src/airflow}/providers/sqlite/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/sqlite/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/sqlite/__init__.py (100%)
rename {airflow/providers/smtp => providers/src/airflow/providers/sqlite}/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/sqlite/hooks/sqlite.py (100%)
rename {airflow/providers/smtp => providers/src/airflow/providers/sqlite}/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/sqlite/operators/sqlite.py (100%)
rename {airflow => providers/src/airflow}/providers/sqlite/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/ssh/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/ssh/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/ssh/__init__.py (100%)
rename {airflow/providers/sqlite => providers/src/airflow/providers/ssh}/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/ssh/hooks/ssh.py (100%)
rename {airflow/providers/sqlite => providers/src/airflow/providers/ssh}/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/ssh/operators/ssh.py (100%)
rename {airflow => providers/src/airflow}/providers/ssh/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/standard/CHANGELOG.rst (100%)
rename {airflow/providers/ssh/hooks => providers/src/airflow/providers/standard}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/standard/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/standard/operators/bash.py (100%)
rename {airflow => providers/src/airflow}/providers/standard/operators/datetime.py (100%)
rename {airflow => providers/src/airflow}/providers/standard/operators/weekday.py (100%)
rename {airflow => providers/src/airflow}/providers/standard/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/standard/sensors/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/standard/sensors/bash.py (100%)
rename {airflow => providers/src/airflow}/providers/standard/sensors/date_time.py (100%)
rename {airflow => providers/src/airflow}/providers/standard/sensors/time.py (100%)
rename {airflow => providers/src/airflow}/providers/standard/sensors/time_delta.py (100%)
rename {airflow => providers/src/airflow}/providers/standard/sensors/weekday.py (100%)
rename {airflow => providers/src/airflow}/providers/tableau/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/tableau/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/tableau/__init__.py (100%)
rename {airflow/providers/ssh/operators => providers/src/airflow/providers/tableau/hooks}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/tableau/hooks/tableau.py (100%)
rename {airflow => providers/src/airflow}/providers/tableau/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/tableau/operators/tableau.py (100%)
rename {airflow => providers/src/airflow}/providers/tableau/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/tableau/sensors/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/tableau/sensors/tableau.py (100%)
rename {airflow => providers/src/airflow}/providers/telegram/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/telegram/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/telegram/__init__.py (100%)
rename {airflow/providers/standard => providers/src/airflow/providers/telegram/hooks}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/telegram/hooks/telegram.py (100%)
rename {airflow/providers/tableau/hooks => providers/src/airflow/providers/telegram/operators}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/telegram/operators/telegram.py (100%)
rename {airflow => providers/src/airflow}/providers/telegram/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/teradata/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/teradata/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/teradata/__init__.py (100%)
rename {airflow/providers/telegram => providers/src/airflow/providers/teradata}/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/teradata/hooks/teradata.py (100%)
rename {airflow/providers/telegram => providers/src/airflow/providers/teradata}/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/teradata/operators/teradata.py (100%)
rename {airflow => providers/src/airflow}/providers/teradata/operators/teradata_compute_cluster.py (100%)
rename {airflow => providers/src/airflow}/providers/teradata/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/teradata/transfers/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/teradata/transfers/azure_blob_to_teradata.py (100%)
rename {airflow => providers/src/airflow}/providers/teradata/transfers/s3_to_teradata.py (100%)
rename {airflow => providers/src/airflow}/providers/teradata/transfers/teradata_to_teradata.py (100%)
rename {airflow => providers/src/airflow}/providers/teradata/triggers/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/teradata/triggers/teradata_compute_cluster.py (100%)
rename {airflow => providers/src/airflow}/providers/teradata/utils/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/teradata/utils/constants.py (100%)
rename {airflow => providers/src/airflow}/providers/trino/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/trino/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/trino/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/trino/assets/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/trino/assets/trino.py (100%)
rename {airflow/providers/teradata => providers/src/airflow/providers/trino}/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/trino/hooks/trino.py (100%)
rename {airflow => providers/src/airflow}/providers/trino/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/trino/operators/trino.py (100%)
rename {airflow => providers/src/airflow}/providers/trino/provider.yaml (100%)
rename {airflow/providers/teradata/operators => providers/src/airflow/providers/trino/transfers}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/trino/transfers/gcs_to_trino.py (100%)
rename {airflow => providers/src/airflow}/providers/vertica/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/vertica/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/vertica/__init__.py (100%)
rename {airflow/providers/trino => providers/src/airflow/providers/vertica}/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/vertica/hooks/vertica.py (100%)
rename {airflow/providers/trino/transfers => providers/src/airflow/providers/vertica/operators}/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/vertica/operators/vertica.py (100%)
rename {airflow => providers/src/airflow}/providers/vertica/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/weaviate/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/weaviate/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/weaviate/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/weaviate/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/weaviate/hooks/weaviate.py (100%)
rename {airflow => providers/src/airflow}/providers/weaviate/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/weaviate/operators/weaviate.py (100%)
rename {airflow => providers/src/airflow}/providers/weaviate/provider.yaml (100%)
rename {airflow => providers/src/airflow}/providers/yandex/.latest-doc-only-change.txt (100%)
rename {airflow => providers/src/airflow}/providers/yandex/CHANGELOG.rst (100%)
rename {airflow => providers/src/airflow}/providers/yandex/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/yandex/hooks/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/yandex/hooks/dataproc.py (100%)
rename {airflow => providers/src/airflow}/providers/yandex/hooks/yandex.py (100%)
rename {airflow => providers/src/airflow}/providers/yandex/hooks/yandexcloud_dataproc.py (100%)
rename {airflow => providers/src/airflow}/providers/yandex/hooks/yq.py (100%)
rename {airflow => providers/src/airflow}/providers/yandex/links/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/yandex/links/yq.py (100%)
rename {airflow => providers/src/airflow}/providers/yandex/operators/__init__.py (100%)
rename {airflow => providers/src/airflow}/providers/yandex/operators/dataproc.py (100%)
rename {airflow => providers/src/airflow}/providers/yandex/operators/yandexcloud_dataproc.py (100%)
rename {airflow => providers/src/airflow}/providers/yandex/operators/yq.py (100%)
rename {airflow => providers/src/airflow}/providers/yandex/provider.yaml (100%)
rename {airflow =>
providers/src/airflow}/providers/yandex/secrets/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/yandex/secrets/lockbox.py (100%) rename {airflow => providers/src/airflow}/providers/yandex/utils/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/yandex/utils/credentials.py (100%) rename {airflow => providers/src/airflow}/providers/yandex/utils/defaults.py (100%) rename {airflow => providers/src/airflow}/providers/yandex/utils/fields.py (100%) rename {airflow => providers/src/airflow}/providers/yandex/utils/user_agent.py (100%) rename {airflow => providers/src/airflow}/providers/ydb/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/ydb/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/ydb/hooks/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/ydb/hooks/_vendor/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/ydb/hooks/_vendor/dbapi/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/ydb/hooks/_vendor/dbapi/connection.py (100%) rename {airflow => providers/src/airflow}/providers/ydb/hooks/_vendor/dbapi/constants.py (100%) rename {airflow => providers/src/airflow}/providers/ydb/hooks/_vendor/dbapi/cursor.py (100%) rename {airflow => providers/src/airflow}/providers/ydb/hooks/_vendor/dbapi/errors.py (100%) rename {airflow => providers/src/airflow}/providers/ydb/hooks/_vendor/readme.md (100%) rename {airflow => providers/src/airflow}/providers/ydb/hooks/ydb.py (100%) rename {airflow => providers/src/airflow}/providers/ydb/operators/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/ydb/operators/ydb.py (100%) rename {airflow => providers/src/airflow}/providers/ydb/provider.yaml (100%) rename {airflow => providers/src/airflow}/providers/ydb/utils/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/ydb/utils/credentials.py (100%) rename {airflow => providers/src/airflow}/providers/ydb/utils/defaults.py (100%) rename {airflow => providers/src/airflow}/providers/zendesk/.latest-doc-only-change.txt (100%) rename {airflow => providers/src/airflow}/providers/zendesk/CHANGELOG.rst (100%) rename {airflow => providers/src/airflow}/providers/zendesk/__init__.py (100%) rename {airflow/providers/vertica => providers/src/airflow/providers/zendesk}/hooks/__init__.py (100%) rename {airflow => providers/src/airflow}/providers/zendesk/hooks/zendesk.py (100%) rename {airflow => providers/src/airflow}/providers/zendesk/provider.yaml (100%) rename {tests/_internals => providers/tests}/__init__.py (100%) rename {tests/integration/providers => providers/tests/airbyte}/__init__.py (100%) rename {tests/integration/providers/apache => providers/tests/airbyte/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/airbyte/hooks/test_airbyte.py (100%) rename {tests/integration/providers/apache/cassandra => providers/tests/airbyte/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/airbyte/operators/test_airbyte.py (100%) rename {tests/integration/providers/apache/cassandra/hooks => providers/tests/airbyte/sensors}/__init__.py (100%) rename {tests/providers => providers/tests}/airbyte/sensors/test_airbyte.py (100%) rename {tests/integration/providers/apache/drill => providers/tests/airbyte/triggers}/__init__.py (100%) rename {tests/providers => providers/tests}/airbyte/triggers/test_airbyte.py (100%) rename {tests/integration/providers/apache/drill/hooks => providers/tests/alibaba}/__init__.py (100%) 
rename {tests/integration/providers/apache/drill/operators => providers/tests/alibaba/cloud}/__init__.py (100%) rename {tests/integration/providers/apache/hive => providers/tests/alibaba/cloud/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/alibaba/cloud/hooks/test_analyticdb_spark.py (99%) rename {tests/providers => providers/tests}/alibaba/cloud/hooks/test_oss.py (99%) rename {tests/integration/providers/apache/hive/transfers => providers/tests/alibaba/cloud/log}/__init__.py (100%) rename {tests/providers => providers/tests}/alibaba/cloud/log/test_oss_task_handler.py (98%) rename {tests/integration/providers/apache/kafka => providers/tests/alibaba/cloud/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/alibaba/cloud/operators/test_analyticdb_spark.py (100%) rename {tests/providers => providers/tests}/alibaba/cloud/operators/test_oss.py (100%) rename {tests/integration/providers/apache/kafka/hooks => providers/tests/alibaba/cloud/sensors}/__init__.py (100%) rename {tests/providers => providers/tests}/alibaba/cloud/sensors/test_analyticdb_spark.py (100%) rename {tests/providers => providers/tests}/alibaba/cloud/sensors/test_oss_key.py (100%) rename {tests/integration/providers/apache/kafka/operators => providers/tests/alibaba/cloud/utils}/__init__.py (100%) rename {tests/providers => providers/tests}/alibaba/cloud/utils/analyticdb_spark_mock.py (100%) rename {tests/providers => providers/tests}/alibaba/cloud/utils/oss_mock.py (100%) rename {tests/providers => providers/tests}/alibaba/cloud/utils/test_utils.py (100%) rename {tests/integration/providers/apache/kafka/sensors => providers/tests/amazon}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/.gitignore (100%) rename {tests/integration/providers/apache/kafka/triggers => providers/tests/amazon/aws}/__init__.py (100%) rename {tests/integration/providers/apache/pinot => providers/tests/amazon/aws/assets}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/assets/test_s3.py (100%) rename {tests/integration/providers/apache/pinot/hooks => providers/tests/amazon/aws/auth_manager}/__init__.py (100%) rename {tests/integration/providers/google => providers/tests/amazon/aws/auth_manager/avp}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/auth_manager/avp/test_entities.py (100%) rename {tests/providers => providers/tests}/amazon/aws/auth_manager/avp/test_facade.py (98%) rename {tests/integration/providers/google/cloud => providers/tests/amazon/aws/auth_manager/cli}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/auth_manager/cli/test_avp_commands.py (97%) rename {tests/providers => providers/tests}/amazon/aws/auth_manager/cli/test_definition.py (100%) rename {tests/integration/providers/google/cloud/transfers => providers/tests/amazon/aws/auth_manager/security_manager}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py (96%) rename {tests/providers => providers/tests}/amazon/aws/auth_manager/test_aws_auth_manager.py (97%) rename {tests/providers => providers/tests}/amazon/aws/auth_manager/test_constants.py (100%) rename {tests/providers => providers/tests}/amazon/aws/auth_manager/test_user.py (100%) rename {tests/integration/providers/microsoft => providers/tests/amazon/aws/auth_manager/views}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/auth_manager/views/test_auth.py (80%) rename 
{tests/integration/providers/microsoft/mssql => providers/tests/amazon/aws/config_templates}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/config_templates/args.json (100%) rename {tests/providers => providers/tests}/amazon/aws/config_templates/job.j2.json (100%) rename {tests/providers => providers/tests}/amazon/aws/config_templates/steps.j2.json (100%) rename {tests/providers => providers/tests}/amazon/aws/config_templates/steps.json (100%) rename {tests/integration/providers/microsoft/mssql/hooks => providers/tests/amazon/aws/deferrable}/__init__.py (100%) rename {tests/integration/providers/mongo => providers/tests/amazon/aws/deferrable/hooks}/__init__.py (100%) rename {tests/integration/providers/mongo/sensors => providers/tests/amazon/aws/executors}/__init__.py (100%) rename {tests/integration/providers/openlineage => providers/tests/amazon/aws/executors/batch}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/executors/batch/test_batch_executor.py (99%) rename {tests/integration/providers/openlineage/operators => providers/tests/amazon/aws/executors/ecs}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/executors/ecs/test_ecs_executor.py (99%) rename {tests/integration/providers/qdrant => providers/tests/amazon/aws/executors/utils}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/executors/utils/test_exponential_backoff_retry.py (100%) rename {tests/integration/providers/qdrant/hooks => providers/tests/amazon/aws/fs}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/fs/test_s3.py (100%) rename {tests/integration/providers/qdrant/operators => providers/tests/amazon/aws/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_appflow.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_athena.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_athena_sql.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_base_aws.py (98%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_batch_client.py (98%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_batch_waiters.py (97%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_bedrock.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_chime.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_cloud_formation.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_comprehend.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_datasync.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_dms.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_dynamodb.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_ec2.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_ecr.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_ecs.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_eks.py (99%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_elasticache_replication_group.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_emr.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_emr_containers.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_emr_serverless.py (100%) 
rename {tests/providers => providers/tests}/amazon/aws/hooks/test_eventbridge.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_glacier.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_glue.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_glue_catalog.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_glue_crawler.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_glue_databrew.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_hooks_signature.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_kinesis.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_kinesis_analytics.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_lambda_function.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_logs.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_neptune.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_opensearch_serverless.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_quicksight.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_rds.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_redshift_cluster.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_redshift_data.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_redshift_sql.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_s3.py (99%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_sagemaker.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_secrets_manager.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_ses.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_sns.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_sqs.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_ssm.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_step_function.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_sts.py (100%) rename {tests/providers => providers/tests}/amazon/aws/hooks/test_verified_permissions.py (100%) rename {tests/integration/providers/redis => providers/tests/amazon/aws/infrastructure}/__init__.py (100%) rename {tests/integration/providers/redis/hooks => providers/tests/amazon/aws/infrastructure/example_s3_to_redshift}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/infrastructure/example_s3_to_redshift/outputs.tf (100%) rename {tests/providers => providers/tests}/amazon/aws/infrastructure/example_s3_to_redshift/resources.tf (100%) rename {tests/providers => providers/tests}/amazon/aws/infrastructure/example_s3_to_redshift/variables.tf (100%) rename {tests/integration/providers/redis/operators => providers/tests/amazon/aws/links}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/links/test_athena.py (95%) rename {tests/providers => providers/tests}/amazon/aws/links/test_base_aws.py (98%) rename {tests/providers => providers/tests}/amazon/aws/links/test_batch.py (97%) rename {tests/providers => providers/tests}/amazon/aws/links/test_emr.py (99%) rename {tests/providers => providers/tests}/amazon/aws/links/test_glue.py (95%) rename {tests/providers => 
providers/tests}/amazon/aws/links/test_logs.py (96%) rename {tests/providers => providers/tests}/amazon/aws/links/test_step_function.py (97%) rename {tests/integration/providers/redis/sensors => providers/tests/amazon/aws/log}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/log/test_cloudwatch_task_handler.py (99%) rename {tests/providers => providers/tests}/amazon/aws/log/test_s3_task_handler.py (99%) rename {tests/integration/providers/trino => providers/tests/amazon/aws/notifications}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/notifications/test_chime.py (100%) rename {tests/providers => providers/tests}/amazon/aws/notifications/test_sns.py (100%) rename {tests/providers => providers/tests}/amazon/aws/notifications/test_sqs.py (100%) rename {tests/integration/providers/trino/hooks => providers/tests/amazon/aws/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/operators/athena_metadata.json (100%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_appflow.py (100%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_athena.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_base_aws.py (100%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_batch.py (100%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_bedrock.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_cloud_formation.py (98%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_comprehend.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_datasync.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_dms.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_ec2.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_ecs.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_eks.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_emr_add_steps.py (97%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_emr_containers.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_emr_create_job_flow.py (95%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_emr_modify_cluster.py (97%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_emr_notebook_execution.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_emr_serverless.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_emr_terminate_job_flow.py (97%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_eventbridge.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_glacier.py (98%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_glue.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_glue_crawler.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_glue_databrew.py (98%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_kinesis_analytics.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_lambda_function.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_neptune.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_quicksight.py (98%) rename 
{tests/providers => providers/tests}/amazon/aws/operators/test_rds.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_redshift_cluster.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_redshift_data.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_redshift_sql.py (100%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_s3.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_sagemaker_base.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_sagemaker_endpoint.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_sagemaker_endpoint_config.py (98%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_sagemaker_model.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_sagemaker_notebook.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_sagemaker_pipeline.py (98%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_sagemaker_processing.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_sagemaker_training.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_sagemaker_transform.py (99%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_sagemaker_tuning.py (98%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_sns.py (97%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_sqs.py (98%) rename {tests/providers => providers/tests}/amazon/aws/operators/test_step_function.py (95%) rename {tests/integration/providers/ydb => providers/tests/amazon/aws/secrets}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/secrets/test_secrets_manager.py (100%) rename {tests/providers => providers/tests}/amazon/aws/secrets/test_systems_manager.py (99%) rename {tests/integration/providers/ydb/hooks => providers/tests/amazon/aws/sensors}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_athena.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_base_aws.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_batch.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_bedrock.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_cloud_formation.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_comprehend.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_dms.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_dynamodb.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_ec2.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_ecs.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_eks.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_emr_base.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_emr_containers.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_emr_job_flow.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_emr_notebook_execution.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_emr_serverless_application.py (100%) rename {tests/providers => 
providers/tests}/amazon/aws/sensors/test_emr_serverless_job.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_emr_step.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_glacier.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_glue.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_glue_catalog_partition.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_glue_crawler.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_glue_data_quality.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_kinesis_analytics.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_lambda_function.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_opensearch_serverless.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_quicksight.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_rds.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_redshift_cluster.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_s3.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_sagemaker_automl.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_sagemaker_base.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_sagemaker_endpoint.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_sagemaker_pipeline.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_sagemaker_training.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_sagemaker_transform.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_sagemaker_tuning.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_sqs.py (100%) rename {tests/providers => providers/tests}/amazon/aws/sensors/test_step_function.py (100%) rename {tests/integration/providers/ydb/operators => providers/tests/amazon/aws/system}/__init__.py (100%) rename {tests/providers => providers/tests/amazon/aws/system/utils}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/system/utils/test_helpers.py (96%) rename {tests/providers/airbyte => providers/tests/amazon/aws/transfers}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_azure_blob_to_s3.py (100%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_base.py (100%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_dynamodb_to_s3.py (100%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_exasol_to_s3.py (100%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_ftp_to_s3.py (100%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_gcs_to_s3.py (100%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_glacier_to_gcs.py (100%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_google_api_to_s3.py (100%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_hive_to_dynamodb.py (100%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_http_to_s3.py (100%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_imap_attachment_to_s3.py (100%) rename {tests/providers => 
providers/tests}/amazon/aws/transfers/test_local_to_s3.py (100%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_mongo_to_s3.py (100%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_redshift_to_s3.py (99%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_s3_to_dynamodb.py (100%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_s3_to_ftp.py (100%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_s3_to_redshift.py (99%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_s3_to_sftp.py (98%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_s3_to_sql.py (100%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_salesforce_to_s3.py (100%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_sftp_to_s3.py (98%) rename {tests/providers => providers/tests}/amazon/aws/transfers/test_sql_to_s3.py (100%) rename {tests/providers/airbyte/hooks => providers/tests/amazon/aws/triggers}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_athena.py (100%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_base.py (100%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_batch.py (100%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_bedrock.py (99%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_comprehend.py (98%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_ec2.py (100%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_ecs.py (100%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_eks.py (100%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_emr.py (100%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_glue.py (99%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_glue_crawler.py (97%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_glue_databrew.py (100%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_kinesis_analytics.py (98%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_lambda_function.py (100%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_neptune.py (100%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_opensearch_serverless.py (98%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_rds.py (100%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_redshift_cluster.py (100%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_redshift_data.py (100%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_s3.py (100%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_sagemaker.py (100%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_serialization.py (100%) rename {tests/providers => providers/tests}/amazon/aws/triggers/test_sqs.py (100%) rename {tests/providers/airbyte/operators => providers/tests/amazon/aws/utils}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/utils/eks_test_constants.py (100%) rename {tests/providers => providers/tests}/amazon/aws/utils/eks_test_utils.py (99%) rename {tests/providers => providers/tests}/amazon/aws/utils/test_connection_wrapper.py (100%) rename {tests/providers => 
providers/tests}/amazon/aws/utils/test_eks_get_token.py (97%) rename {tests/providers => providers/tests}/amazon/aws/utils/test_emailer.py (100%) rename {tests/providers => providers/tests}/amazon/aws/utils/test_identifiers.py (100%) rename {tests/providers => providers/tests}/amazon/aws/utils/test_mixins.py (100%) rename {tests/providers => providers/tests}/amazon/aws/utils/test_openlineage.py (100%) rename {tests/providers => providers/tests}/amazon/aws/utils/test_redshift.py (100%) rename {tests/providers => providers/tests}/amazon/aws/utils/test_sqs.py (100%) rename {tests/providers => providers/tests}/amazon/aws/utils/test_suppress.py (100%) rename {tests/providers => providers/tests}/amazon/aws/utils/test_tags.py (100%) rename {tests/providers => providers/tests}/amazon/aws/utils/test_task_log_fetcher.py (100%) rename {tests/providers => providers/tests}/amazon/aws/utils/test_template_fields.py (100%) rename {tests/providers => providers/tests}/amazon/aws/utils/test_utils.py (100%) rename {tests/providers => providers/tests}/amazon/aws/utils/test_waiter.py (100%) rename {tests/providers => providers/tests}/amazon/aws/utils/test_waiter_with_logging.py (100%) rename {tests/providers/airbyte/sensors => providers/tests/amazon/aws/waiters}/__init__.py (100%) rename {tests/providers => providers/tests}/amazon/aws/waiters/test.json (100%) rename {tests/providers => providers/tests}/amazon/aws/waiters/test_batch.py (100%) rename {tests/providers => providers/tests}/amazon/aws/waiters/test_bedrock.py (100%) rename {tests/providers => providers/tests}/amazon/aws/waiters/test_bedrock_agent.py (100%) rename {tests/providers => providers/tests}/amazon/aws/waiters/test_comprehend.py (100%) rename {tests/providers => providers/tests}/amazon/aws/waiters/test_custom_waiters.py (100%) rename {tests/providers => providers/tests}/amazon/aws/waiters/test_dynamo.py (100%) rename {tests/providers => providers/tests}/amazon/aws/waiters/test_ecs.py (100%) rename {tests/providers => providers/tests}/amazon/aws/waiters/test_eks.py (97%) rename {tests/providers => providers/tests}/amazon/aws/waiters/test_emr.py (100%) rename {tests/providers => providers/tests}/amazon/aws/waiters/test_glue.py (100%) rename {tests/providers => providers/tests}/amazon/aws/waiters/test_glue_databrew.py (100%) rename {tests/providers => providers/tests}/amazon/aws/waiters/test_kinesis_analytics.py (100%) rename {tests/providers => providers/tests}/amazon/aws/waiters/test_neptune.py (100%) rename {tests/providers => providers/tests}/amazon/aws/waiters/test_opensearch_serverless.py (100%) rename {tests/providers => providers/tests}/amazon/conftest.py (100%) rename {tests/providers/airbyte/triggers => providers/tests/apache}/__init__.py (100%) rename {tests/providers/alibaba => providers/tests/apache/beam}/__init__.py (100%) rename {tests/providers/alibaba/cloud => providers/tests/apache/beam/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/beam/hooks/test_beam.py (100%) rename {tests/providers/alibaba/cloud/hooks => providers/tests/apache/beam/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/beam/operators/test_beam.py (100%) rename {tests/providers/alibaba/cloud/log => providers/tests/apache/beam/triggers}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/beam/triggers/test_beam.py (100%) rename {tests/providers/alibaba/cloud/operators => providers/tests/apache/cassandra}/__init__.py (100%) rename {tests/providers/alibaba/cloud => 
providers/tests/apache/cassandra}/sensors/__init__.py (100%) rename {tests/providers => providers/tests}/apache/cassandra/sensors/test_record.py (100%) rename {tests/providers => providers/tests}/apache/cassandra/sensors/test_table.py (100%) rename {airflow/providers/vertica/operators => providers/tests/apache/drill}/__init__.py (100%) rename {airflow/providers/zendesk => providers/tests/apache/drill}/hooks/__init__.py (100%) rename {tests/providers => providers/tests}/apache/drill/hooks/test_drill.py (94%) rename {tests/providers/apache/drill => providers/tests/apache/druid}/__init__.py (100%) rename {tests/providers/apache/drill => providers/tests/apache/druid}/hooks/__init__.py (100%) rename {tests/providers => providers/tests}/apache/druid/hooks/test_druid.py (100%) rename {tests/providers/apache/druid => providers/tests/apache/druid/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/druid/operators/test_druid.py (100%) rename {tests/providers/alibaba/cloud/utils => providers/tests/apache/druid/transfers}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/druid/transfers/test_hive_to_druid.py (100%) rename {tests/providers/apache/druid/hooks => providers/tests/apache/flink}/__init__.py (100%) rename {tests/providers/apache/druid => providers/tests/apache/flink}/operators/__init__.py (100%) rename {tests/providers => providers/tests}/apache/flink/operators/test_flink_kubernetes.py (100%) rename {tests/providers/apache/flink => providers/tests/apache/flink/sensors}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/flink/sensors/test_flink_kubernetes.py (100%) rename {tests/providers/apache/flink/operators => providers/tests/apache/hdfs}/__init__.py (100%) rename {tests/providers/apache/flink/sensors => providers/tests/apache/hdfs/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/hdfs/hooks/test_webhdfs.py (100%) rename {tests/providers/apache/hdfs => providers/tests/apache/hdfs/sensors}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/hdfs/sensors/test_web_hdfs.py (100%) rename {tests/providers => providers/tests}/apache/hive/__init__.py (100%) rename {tests/providers/apache/hdfs => providers/tests/apache/hive}/hooks/__init__.py (100%) rename {tests/providers => providers/tests}/apache/hive/hooks/query_results.csv (100%) rename {tests/providers => providers/tests}/apache/hive/hooks/test_hive.py (98%) rename {tests/providers/apache/hdfs/sensors => providers/tests/apache/hive/macros}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/hive/macros/test_hive.py (100%) rename {tests/providers/apache/hive/hooks => providers/tests/apache/hive/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/hive/operators/test_hive.py (99%) rename {tests/providers => providers/tests}/apache/hive/operators/test_hive_stats.py (99%) rename {tests/providers/apache/hive/macros => providers/tests/apache/hive/sensors}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/hive/sensors/test_hive_partition.py (96%) rename {tests/providers => providers/tests}/apache/hive/sensors/test_metastore_partition.py (96%) rename {tests/providers => providers/tests}/apache/hive/sensors/test_named_hive_partition.py (99%) rename {tests/providers/amazon => providers/tests/apache/hive/transfers}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/hive/transfers/test_hive_to_mysql.py (99%) rename {tests/providers => 
providers/tests}/apache/hive/transfers/test_hive_to_samba.py (99%) rename {tests/providers => providers/tests}/apache/hive/transfers/test_mssql_to_hive.py (100%) rename {tests/providers => providers/tests}/apache/hive/transfers/test_mysql_to_hive.py (100%) rename {tests/providers => providers/tests}/apache/hive/transfers/test_s3_to_hive.py (100%) rename {tests/providers => providers/tests}/apache/hive/transfers/test_vertica_to_hive.py (100%) rename {tests/providers/amazon/aws => providers/tests/apache/iceberg}/__init__.py (100%) rename {tests/providers/amazon/aws/assets => providers/tests/apache/iceberg/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/iceberg/hooks/test_iceberg.py (100%) rename {tests/providers/amazon/aws/auth_manager => providers/tests/apache/impala}/__init__.py (100%) rename {tests/providers/amazon/aws/auth_manager/avp => providers/tests/apache/impala/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/impala/hooks/test_impala.py (100%) rename {tests/providers/amazon/aws/auth_manager/cli => providers/tests/apache/kafka}/__init__.py (100%) rename {tests/providers/amazon/aws/auth_manager/security_manager => providers/tests/apache/kafka/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/kafka/hooks/test_base.py (100%) rename {tests/providers => providers/tests}/apache/kafka/hooks/test_client.py (100%) rename {tests/providers => providers/tests}/apache/kafka/hooks/test_consume.py (100%) rename {tests/providers => providers/tests}/apache/kafka/hooks/test_produce.py (100%) rename {tests/providers/amazon/aws/auth_manager/views => providers/tests/apache/kafka/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/kafka/operators/test_consume.py (98%) rename {tests/providers => providers/tests}/apache/kafka/operators/test_produce.py (97%) rename {tests/providers/amazon/aws/config_templates => providers/tests/apache/kafka/sensors}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/kafka/sensors/test_kafka.py (100%) rename {tests/providers/amazon/aws/executors => providers/tests/apache/kafka/triggers}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/kafka/triggers/test_await_message.py (96%) rename {tests/providers/apache/hive/operators => providers/tests/apache/kylin}/__init__.py (100%) rename {tests/providers/apache/hive/sensors => providers/tests/apache/kylin/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/kylin/hooks/test_kylin.py (100%) rename {tests/providers/apache/kylin => providers/tests/apache/kylin/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/kylin/operators/test_kylin_cube.py (100%) rename {tests/providers/amazon/aws/executors/batch => providers/tests/apache/livy}/__init__.py (100%) rename {tests/providers/amazon/aws/executors/ecs => providers/tests/apache/livy/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/livy/hooks/test_livy.py (99%) rename {tests/providers/amazon/aws/executors/utils => providers/tests/apache/livy/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/livy/operators/test_livy.py (100%) rename {tests/providers/amazon/aws/fs => providers/tests/apache/livy/sensors}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/livy/sensors/test_livy.py (100%) rename {tests/providers/amazon/aws/hooks => providers/tests/apache/livy/triggers}/__init__.py (100%) rename {tests/providers => 
providers/tests}/apache/livy/triggers/test_livy.py (100%) rename {tests/providers/apache/kylin/hooks => providers/tests/apache/pig}/__init__.py (100%) rename {tests/providers/apache/kylin/operators => providers/tests/apache/pig/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/pig/hooks/test_pig.py (100%) rename {tests/providers/amazon/aws/infrastructure => providers/tests/apache/pig/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/pig/operators/test_pig.py (100%) rename {tests/providers/apache/pig => providers/tests/apache/pinot}/__init__.py (100%) rename {tests/providers/apache/pig => providers/tests/apache/pinot}/hooks/__init__.py (100%) rename {tests/providers => providers/tests}/apache/pinot/hooks/test_pinot.py (100%) rename {tests/providers/apache/pinot => providers/tests/apache/spark}/__init__.py (100%) rename {tests/providers/amazon/aws/infrastructure/example_s3_to_redshift => providers/tests/apache/spark/decorators}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/spark/decorators/test_pyspark.py (100%) rename {tests/providers/apache/pinot => providers/tests/apache/spark}/hooks/__init__.py (100%) rename {tests/providers => providers/tests}/apache/spark/hooks/test_spark_connect.py (100%) rename {tests/providers => providers/tests}/apache/spark/hooks/test_spark_jdbc.py (100%) rename {tests/providers => providers/tests}/apache/spark/hooks/test_spark_jdbc_script.py (100%) rename {tests/providers => providers/tests}/apache/spark/hooks/test_spark_sql.py (99%) rename {tests/providers => providers/tests}/apache/spark/hooks/test_spark_submit.py (100%) rename {tests/providers/apache/spark => providers/tests/apache/spark/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/apache/spark/operators/test_spark_jdbc.py (100%) rename {tests/providers => providers/tests}/apache/spark/operators/test_spark_sql.py (100%) rename {tests/providers => providers/tests}/apache/spark/operators/test_spark_submit.py (100%) rename {tests/providers/amazon/aws/links => providers/tests/apprise}/__init__.py (100%) rename {tests/providers/apache/spark => providers/tests/apprise}/hooks/__init__.py (100%) rename {tests/providers => providers/tests}/apprise/hooks/test_apprise.py (100%) rename {tests/providers/apache/spark/operators => providers/tests/apprise/notifications}/__init__.py (100%) rename {tests/providers => providers/tests}/apprise/notifications/test_apprise.py (100%) rename {tests/providers/amazon/aws/log => providers/tests/arangodb}/__init__.py (100%) rename {tests/providers/amazon/aws/notifications => providers/tests/arangodb/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/arangodb/hooks/test_arangodb.py (100%) rename {tests/providers/amazon/aws => providers/tests/arangodb}/operators/__init__.py (100%) rename {tests/providers => providers/tests}/arangodb/operators/test_arangodb.py (100%) rename {tests/providers/amazon/aws/secrets => providers/tests/arangodb/sensors}/__init__.py (100%) rename {tests/providers => providers/tests}/arangodb/sensors/test_arangodb.py (100%) rename {tests/providers/apprise/hooks => providers/tests/asana}/__init__.py (100%) rename {tests/providers/apprise/notifications => providers/tests/asana/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/asana/hooks/test_asana.py (100%) rename {tests/providers/asana => providers/tests/asana/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/asana/operators/test_asana_tasks.py 
(100%) rename {tests/providers/amazon/aws/sensors => providers/tests/atlassian}/__init__.py (100%) rename {tests/providers/amazon/aws/system => providers/tests/atlassian/jira}/__init__.py (100%) rename {tests/providers/amazon/aws/system/utils => providers/tests/atlassian/jira/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/atlassian/jira/hooks/test_jira.py (98%) rename {tests/providers/amazon/aws/transfers => providers/tests/atlassian/jira/notifications}/__init__.py (100%) rename {tests/providers => providers/tests}/atlassian/jira/notifications/test_jira.py (100%) rename {tests/providers/amazon/aws/triggers => providers/tests/atlassian/jira/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/atlassian/jira/operators/test_jira.py (98%) rename {tests/providers/amazon/aws/utils => providers/tests/atlassian/jira/sensors}/__init__.py (100%) rename {tests/providers => providers/tests}/atlassian/jira/sensors/test_jira.py (97%) rename {tests/providers/asana/hooks => providers/tests/celery}/__init__.py (100%) rename {tests/providers/amazon/aws/waiters => providers/tests/celery/cli}/__init__.py (100%) rename {tests/providers => providers/tests}/celery/cli/test_celery_command.py (99%) rename {tests/providers/asana/operators => providers/tests/celery/executors}/__init__.py (100%) rename {tests/providers => providers/tests}/celery/executors/test_celery_executor.py (96%) rename {tests/providers => providers/tests}/celery/executors/test_celery_kubernetes_executor.py (100%) rename {tests/providers/apache => providers/tests/celery/log_handlers}/__init__.py (100%) rename {tests/providers => providers/tests}/celery/log_handlers/test_log_handlers.py (96%) rename {tests/providers/celery => providers/tests/celery/sensors}/__init__.py (100%) rename {tests/providers => providers/tests}/celery/sensors/test_celery_queue.py (100%) rename {tests/providers/celery/executors => providers/tests/cloudant}/__init__.py (100%) rename {tests/providers/celery/sensors => providers/tests/cloudant/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/cloudant/hooks/test_cloudant.py (100%) rename {tests/providers/apache/beam => providers/tests/cncf}/__init__.py (100%) rename {tests/providers/cloudant => providers/tests/cncf/kubernetes}/__init__.py (100%) rename {tests/providers/apache/beam/hooks => providers/tests/cncf/kubernetes/cli}/__init__.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/cli/test_kubernetes_command.py (99%) rename {tests/providers => providers/tests}/cncf/kubernetes/conftest.py (100%) rename {tests/providers/apache/beam/operators => providers/tests/cncf/kubernetes/data_files}/__init__.py (100%) rename {tests/providers/apache/beam/triggers => providers/tests/cncf/kubernetes/data_files/executor}/__init__.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/data_files/executor/basic_template.yaml (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/data_files/kube_config (100%) rename {tests/providers/apache/cassandra => providers/tests/cncf/kubernetes/data_files/pods}/__init__.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/data_files/pods/generator_base.yaml (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/data_files/pods/generator_base_with_secrets.yaml (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/data_files/pods/template.yaml (100%) rename {tests/providers/apache/cassandra/sensors => 
providers/tests/cncf/kubernetes/data_files/spark}/__init__.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/data_files/spark/application_template.yaml (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/data_files/spark/application_test.json (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/data_files/spark/application_test.yaml (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/data_files/spark/application_test_with_no_name_from_config.json (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/data_files/spark/application_test_with_no_name_from_config.yaml (100%) rename {tests/providers/apache/druid/transfers => providers/tests/cncf/kubernetes/decorators}/__init__.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/decorators/test_kubernetes.py (100%) rename {tests/providers/apache/hive/transfers => providers/tests/cncf/kubernetes/executors}/__init__.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/executors/test_kubernetes_executor.py (99%) rename {tests/providers => providers/tests}/cncf/kubernetes/executors/test_local_kubernetes_executor.py (100%) rename {tests/providers/apache/iceberg => providers/tests/cncf/kubernetes/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/hooks/test_kubernetes.py (99%) rename {tests/providers/apache/iceberg/hooks => providers/tests/cncf/kubernetes/log_handlers}/__init__.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/log_handlers/test_log_handlers.py (98%) rename {tests/providers/cloudant/hooks => providers/tests/cncf/kubernetes/models}/__init__.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/models/test_secret.py (100%) rename {tests/providers/apache/impala => providers/tests/cncf/kubernetes/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/operators/test_custom_object_launcher.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/operators/test_job.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/operators/test_pod.py (99%) rename {tests/providers => providers/tests}/cncf/kubernetes/operators/test_resource.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/operators/test_spark_kubernetes.py (99%) rename {tests/providers/apache/impala/hooks => providers/tests/cncf/kubernetes/resource_convert}/__init__.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/resource_convert/test_configmap.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/resource_convert/test_env_variable.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/resource_convert/test_secret.py (100%) rename {tests/providers/apache/kafka => providers/tests/cncf/kubernetes/sensors}/__init__.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/sensors/test_spark_kubernetes.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/test_callbacks.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/test_client.py (98%) rename {tests/providers => providers/tests}/cncf/kubernetes/test_kubernetes_helper_functions.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/test_pod_generator.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/test_template_rendering.py (96%) rename {tests/providers/apache/kafka/hooks => providers/tests/cncf/kubernetes/triggers}/__init__.py (100%) rename 
{tests/providers => providers/tests}/cncf/kubernetes/triggers/test_job.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/triggers/test_pod.py (100%) rename {tests/providers/apache/kafka/operators => providers/tests/cncf/kubernetes/utils}/__init__.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/utils/test_k8s_resource_iterator.py (100%) rename {tests/providers => providers/tests}/cncf/kubernetes/utils/test_pod_manager.py (99%) rename {tests/providers/apache/kafka/sensors => providers/tests/cohere}/__init__.py (100%) rename {tests/providers/apache/kafka/triggers => providers/tests/cohere/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/cohere/hooks/test_cohere.py (84%) rename {tests/providers/apache/livy => providers/tests/cohere/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/cohere/operators/test_embedding.py (100%) rename {tests/providers/apache/livy/hooks => providers/tests/common}/__init__.py (100%) rename {tests/providers/apache/livy/operators => providers/tests/common/compat}/__init__.py (100%) rename {tests/providers/apache/livy/sensors => providers/tests/common/compat/lineage}/__init__.py (100%) rename {tests/providers => providers/tests}/common/compat/lineage/test_hook.py (100%) rename {tests/providers/apache/livy/triggers => providers/tests/common/compat/openlineage}/__init__.py (100%) rename {tests/providers => providers/tests}/common/compat/openlineage/test_facet.py (100%) rename {tests/providers/apache/pig/operators => providers/tests/common/compat/openlineage/utils}/__init__.py (100%) rename {tests/providers => providers/tests}/common/compat/openlineage/utils/test_utils.py (100%) rename {tests/providers/apache/spark/decorators => providers/tests/common/compat/security}/__init__.py (100%) rename {tests/providers => providers/tests}/common/compat/security/test_permissions.py (100%) rename {tests/providers/apprise => providers/tests/common/io}/__init__.py (100%) rename {tests/providers/arangodb => providers/tests/common/io/assets}/__init__.py (100%) rename {tests/providers => providers/tests}/common/io/assets/test_file.py (100%) rename {tests/providers/arangodb/hooks => providers/tests/common/io/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/common/io/operators/test_file_transfer.py (97%) rename {tests/providers/arangodb/operators => providers/tests/common/io/xcom}/__init__.py (100%) rename {tests/providers => providers/tests}/common/io/xcom/test_backend.py (96%) rename {tests/providers/arangodb/sensors => providers/tests/common/sql}/__init__.py (100%) rename {tests/providers/atlassian => providers/tests/common/sql/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/common/sql/hooks/test_dbapi.py (99%) rename {tests/providers => providers/tests}/common/sql/hooks/test_sql.py (98%) rename {tests/providers => providers/tests}/common/sql/hooks/test_sqlparse.py (96%) rename {tests/providers/atlassian/jira => providers/tests/common/sql/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/common/sql/operators/test_sql.py (99%) rename {tests/providers => providers/tests}/common/sql/operators/test_sql_execute.py (99%) rename {tests/providers/atlassian/jira/hooks => providers/tests/common/sql/sensors}/__init__.py (100%) rename {tests/providers => providers/tests}/common/sql/sensors/test_sql.py (99%) rename {tests/providers => providers/tests}/common/sql/test_utils.py (96%) rename {tests/providers => providers/tests}/conftest.py (81%) 
rename {tests/providers/cncf/kubernetes => providers/tests/databricks}/__init__.py (100%)
rename {tests/providers/cncf/kubernetes/models => providers/tests/databricks/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/databricks/hooks/test_databricks.py (100%)
rename {tests/providers => providers/tests}/databricks/hooks/test_databricks_base.py (100%)
rename {tests/providers => providers/tests}/databricks/hooks/test_databricks_sql.py (98%)
rename {tests/providers/databricks => providers/tests/databricks/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/databricks/operators/test_databricks.py (100%)
rename {tests/providers => providers/tests}/databricks/operators/test_databricks_copy.py (100%)
rename {tests/providers => providers/tests}/databricks/operators/test_databricks_repos.py (100%)
rename {tests/providers => providers/tests}/databricks/operators/test_databricks_sql.py (100%)
rename {tests/providers => providers/tests}/databricks/operators/test_databricks_workflow.py (100%)
rename {tests/providers/atlassian/jira/notifications => providers/tests/databricks/plugins}/__init__.py (100%)
rename {tests/providers => providers/tests}/databricks/plugins/test_databricks_workflow.py (97%)
rename {tests/providers/atlassian/jira/operators => providers/tests/databricks/sensors}/__init__.py (100%)
rename {tests/providers => providers/tests}/databricks/sensors/test_databricks_partition.py (100%)
rename {tests/providers => providers/tests}/databricks/sensors/test_databricks_sql.py (100%)
rename {tests/providers/databricks/hooks => providers/tests/databricks/triggers}/__init__.py (100%)
rename {tests/providers => providers/tests}/databricks/triggers/test_databricks.py (100%)
rename {tests/providers/atlassian/jira/sensors => providers/tests/databricks/utils}/__init__.py (100%)
rename {tests/providers => providers/tests}/databricks/utils/test_databricks.py (100%)
rename {tests/providers/databricks/operators => providers/tests/datadog}/__init__.py (100%)
rename {tests/providers/databricks/triggers => providers/tests/datadog/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/datadog/hooks/test_datadog.py (100%)
rename {tests/providers/datadog => providers/tests/datadog/sensors}/__init__.py (100%)
rename {tests/providers => providers/tests}/datadog/sensors/test_datadog.py (100%)
rename {tests/providers/celery/cli => providers/tests/dbt}/__init__.py (100%)
rename {tests/providers/celery/log_handlers => providers/tests/dbt/cloud}/__init__.py (100%)
rename {tests/providers/cncf => providers/tests/dbt/cloud/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/dbt/cloud/hooks/test_dbt.py (99%)
rename {tests/providers/cncf/kubernetes/cli => providers/tests/dbt/cloud/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/dbt/cloud/operators/test_dbt.py (99%)
rename {tests/providers/cncf/kubernetes/data_files => providers/tests/dbt/cloud/sensors}/__init__.py (100%)
rename {tests/providers => providers/tests}/dbt/cloud/sensors/test_dbt.py (100%)
rename {tests/providers/cncf/kubernetes/data_files/executor => providers/tests/dbt/cloud/test_data}/__init__.py (100%)
rename {tests/providers => providers/tests}/dbt/cloud/test_data/catalog.json (100%)
rename {tests/providers => providers/tests}/dbt/cloud/test_data/job_run.json (100%)
rename {tests/providers => providers/tests}/dbt/cloud/test_data/manifest.json (100%)
rename {tests/providers => providers/tests}/dbt/cloud/test_data/run_results.json (100%)
rename {tests/providers/cncf/kubernetes/data_files/pods => providers/tests/dbt/cloud/triggers}/__init__.py (100%)
rename {tests/providers => providers/tests}/dbt/cloud/triggers/test_dbt.py (100%)
rename {tests/providers/cncf/kubernetes/data_files/spark => providers/tests/dbt/cloud/utils}/__init__.py (100%)
rename {tests/providers => providers/tests}/dbt/cloud/utils/test_openlineage.py (100%)
create mode 100644 providers/tests/deprecations_ignore.yml
rename {tests/providers/datadog/hooks => providers/tests/dingding}/__init__.py (100%)
rename {tests/providers/datadog/sensors => providers/tests/dingding/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/dingding/hooks/test_dingding.py (100%)
rename {tests/providers/dingding => providers/tests/dingding/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/dingding/operators/test_dingding.py (100%)
rename {tests/providers/dingding/hooks => providers/tests/discord}/__init__.py (100%)
rename {tests/providers/dingding/operators => providers/tests/discord/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/discord/hooks/test_discord_webhook.py (100%)
rename {tests/providers/cncf/kubernetes/decorators => providers/tests/discord/notifications}/__init__.py (100%)
rename {tests/providers => providers/tests}/discord/notifications/test_discord.py (100%)
rename {tests/providers/discord => providers/tests/discord/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/discord/operators/test_discord_webhook.py (100%)
rename {tests/providers/discord/hooks => providers/tests/docker}/__init__.py (100%)
rename {tests/providers => providers/tests}/docker/conftest.py (100%)
rename {tests/providers/cncf/kubernetes/executors => providers/tests/docker/decorators}/__init__.py (100%)
rename {tests/providers => providers/tests}/docker/decorators/test_docker.py (100%)
rename {tests/providers/discord/operators => providers/tests/docker/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/docker/hooks/test_docker.py (100%)
rename {tests/providers/docker => providers/tests/docker/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/docker/operators/test_docker.py (99%)
rename {tests/providers => providers/tests}/docker/operators/test_docker_swarm.py (100%)
rename {tests/providers => providers/tests}/docker/test_exceptions.py (100%)
rename {tests/providers/docker/hooks => providers/tests/edge}/__init__.py (100%)
rename {tests/providers/docker/operators => providers/tests/edge/api_endpoints}/__init__.py (100%)
rename {tests/providers => providers/tests}/edge/api_endpoints/test_health_endpoint.py (100%)
rename {tests/providers => providers/tests}/edge/api_endpoints/test_rpc_api_endpoint.py (98%)
rename {tests/providers/edge => providers/tests/edge/cli}/__init__.py (100%)
rename {tests/providers => providers/tests}/edge/cli/test_edge_command.py (99%)
rename {tests/providers/edge/api_endpoints => providers/tests/edge/models}/__init__.py (100%)
rename {tests/providers => providers/tests}/edge/models/test_edge_job.py (100%)
rename {tests/providers => providers/tests}/edge/models/test_edge_logs.py (100%)
rename {tests/providers => providers/tests}/edge/models/test_edge_worker.py (100%)
rename {tests/providers/edge/cli => providers/tests/edge/plugins}/__init__.py (100%)
rename {tests/providers => providers/tests}/edge/plugins/test_edge_executor_plugin.py (97%)
rename {tests/providers/edge/models => providers/tests/elasticsearch}/__init__.py (100%)
rename {tests/providers/edge/plugins => providers/tests/elasticsearch/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/elasticsearch/hooks/test_elasticsearch.py (100%)
rename {tests/providers/cncf/kubernetes/hooks => providers/tests/elasticsearch/log}/__init__.py (100%)
rename {tests/providers => providers/tests}/elasticsearch/log/elasticmock/__init__.py (98%)
rename {tests/providers => providers/tests}/elasticsearch/log/elasticmock/fake_elasticsearch.py (99%)
rename {tests/providers => providers/tests}/elasticsearch/log/elasticmock/utilities/__init__.py (100%)
rename {tests/providers => providers/tests}/elasticsearch/log/test_es_json_formatter.py (100%)
rename {tests/providers => providers/tests}/elasticsearch/log/test_es_response.py (100%)
rename {tests/providers => providers/tests}/elasticsearch/log/test_es_task_handler.py (99%)
rename {tests/providers/elasticsearch => providers/tests/email}/__init__.py (100%)
rename {tests/providers/elasticsearch/hooks => providers/tests/email/operators}/__init__.py (100%)
rename {tests/providers/email => providers/tests/exasol}/__init__.py (100%)
rename {tests/providers/email/operators => providers/tests/exasol/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/exasol/hooks/test_exasol.py (100%)
rename {tests/providers => providers/tests}/exasol/hooks/test_sql.py (100%)
rename {tests/providers/exasol => providers/tests/exasol/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/exasol/operators/test_exasol.py (100%)
rename {tests/providers => providers/tests}/exasol/operators/test_exasol_sql.py (100%)
rename {tests/providers/exasol/hooks => providers/tests/fab}/__init__.py (100%)
rename {tests/providers/exasol/operators => providers/tests/fab/auth_manager}/__init__.py (100%)
rename {tests/providers/fab => providers/tests/fab/auth_manager/api}/__init__.py (100%)
rename {tests/providers/cncf/kubernetes/log_handlers => providers/tests/fab/auth_manager/api/auth}/__init__.py (100%)
rename {tests/providers/cncf/kubernetes/operators => providers/tests/fab/auth_manager/api/auth/backend}/__init__.py (100%)
rename {tests/providers => providers/tests}/fab/auth_manager/api/auth/backend/test_basic_auth.py (98%)
rename {tests/providers => providers/tests}/fab/auth_manager/api/auth/backend/test_kerberos_auth.py (92%)
rename {tests/providers/cncf/kubernetes/resource_convert => providers/tests/fab/auth_manager/api_endpoints}/__init__.py (100%)
rename {tests/providers => providers/tests}/fab/auth_manager/api_endpoints/api_connexion_utils.py (97%)
rename {tests/providers => providers/tests}/fab/auth_manager/api_endpoints/remote_user_api_auth_backend.py (100%)
rename {tests/providers => providers/tests}/fab/auth_manager/api_endpoints/test_asset_endpoint.py (97%)
rename {tests/providers => providers/tests}/fab/auth_manager/api_endpoints/test_auth.py (95%)
rename {tests/providers => providers/tests}/fab/auth_manager/api_endpoints/test_backfill_endpoint.py (97%)
rename {tests/providers => providers/tests}/fab/auth_manager/api_endpoints/test_cors.py (96%)
rename {tests/providers => providers/tests}/fab/auth_manager/api_endpoints/test_dag_endpoint.py (96%)
rename {tests/providers => providers/tests}/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py (97%)
rename {tests/providers => providers/tests}/fab/auth_manager/api_endpoints/test_dag_source_endpoint.py (95%)
rename {tests/providers => providers/tests}/fab/auth_manager/api_endpoints/test_dag_warning_endpoint.py (92%)
rename {tests/providers => providers/tests}/fab/auth_manager/api_endpoints/test_event_log_endpoint.py (96%)
rename {tests/providers => providers/tests}/fab/auth_manager/api_endpoints/test_import_error_endpoint.py (96%)
rename {tests/providers => providers/tests}/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py (99%)
rename {tests/providers => providers/tests}/fab/auth_manager/api_endpoints/test_role_and_permission_schema.py (97%)
rename {tests/providers => providers/tests}/fab/auth_manager/api_endpoints/test_task_instance_endpoint.py (98%)
rename {tests/providers => providers/tests}/fab/auth_manager/api_endpoints/test_user_endpoint.py (99%)
rename {tests/providers => providers/tests}/fab/auth_manager/api_endpoints/test_user_schema.py (97%)
rename {tests/providers => providers/tests}/fab/auth_manager/api_endpoints/test_variable_endpoint.py (93%)
rename {tests/providers => providers/tests}/fab/auth_manager/api_endpoints/test_xcom_endpoint.py (97%)
rename {tests/providers/cncf/kubernetes/sensors => providers/tests/fab/auth_manager/cli_commands}/__init__.py (100%)
rename {tests/providers => providers/tests}/fab/auth_manager/cli_commands/test_db_command.py (100%)
rename {tests/providers => providers/tests}/fab/auth_manager/cli_commands/test_definition.py (94%)
rename {tests/providers => providers/tests}/fab/auth_manager/cli_commands/test_role_command.py (98%)
rename {tests/providers => providers/tests}/fab/auth_manager/cli_commands/test_sync_perm_command.py (96%)
rename {tests/providers => providers/tests}/fab/auth_manager/cli_commands/test_user_command.py (99%)
rename {tests/providers => providers/tests}/fab/auth_manager/cli_commands/test_utils.py (93%)
rename {tests/providers => providers/tests}/fab/auth_manager/conftest.py (91%)
rename {tests/providers/cncf/kubernetes/triggers => providers/tests/fab/auth_manager/decorators}/__init__.py (100%)
rename {tests/providers => providers/tests}/fab/auth_manager/decorators/test_auth.py (98%)
rename {tests/providers/fab/auth_manager => providers/tests/fab/auth_manager/models}/__init__.py (100%)
rename {tests/providers => providers/tests}/fab/auth_manager/models/test_anonymous_user.py (93%)
rename {tests/providers => providers/tests}/fab/auth_manager/models/test_db.py (93%)
rename {tests/providers/cncf/kubernetes/utils => providers/tests/fab/auth_manager/security_manager}/__init__.py (100%)
rename {tests/providers => providers/tests}/fab/auth_manager/security_manager/test_constants.py (93%)
rename {tests/providers => providers/tests}/fab/auth_manager/security_manager/test_override.py (95%)
rename {tests/providers => providers/tests}/fab/auth_manager/test_fab_auth_manager.py (99%)
rename {tests/providers => providers/tests}/fab/auth_manager/test_models.py (96%)
rename {tests/providers => providers/tests}/fab/auth_manager/test_security.py (98%)
rename {tests/providers/fab/auth_manager/api => providers/tests/fab/auth_manager/views}/__init__.py (100%)
rename {tests/providers => providers/tests}/fab/auth_manager/views/test_permissions.py (93%)
rename {tests/providers => providers/tests}/fab/auth_manager/views/test_roles_list.py (91%)
rename {tests/providers => providers/tests}/fab/auth_manager/views/test_user.py (91%)
rename {tests/providers => providers/tests}/fab/auth_manager/views/test_user_edit.py (91%)
rename {tests/providers => providers/tests}/fab/auth_manager/views/test_user_stats.py (91%)
rename {tests/providers/cohere => providers/tests/facebook}/__init__.py (100%)
rename {tests/providers/cohere/hooks => providers/tests/facebook/ads}/__init__.py (100%)
rename {tests/providers/cohere/operators => providers/tests/facebook/ads/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/facebook/ads/hooks/test_ads.py (100%)
rename {tests/providers/fab/auth_manager/models => providers/tests/ftp}/__init__.py (100%)
rename {tests/providers/fab/auth_manager/views => providers/tests/ftp/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/ftp/hooks/test_ftp.py (100%)
rename {tests/providers/common => providers/tests/ftp/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/ftp/operators/test_ftp.py (100%)
rename {tests/providers/ftp => providers/tests/ftp/sensors}/__init__.py (100%)
rename {tests/providers => providers/tests}/ftp/sensors/test_ftp.py (100%)
rename {tests/providers/common/compat => providers/tests/github}/__init__.py (100%)
rename {tests/providers/common/compat/lineage => providers/tests/github/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/github/hooks/test_github.py (100%)
rename {tests/providers/common/compat/openlineage => providers/tests/github/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/github/operators/test_github.py (100%)
rename {tests/providers/common/compat/openlineage/utils => providers/tests/github/sensors}/__init__.py (100%)
rename {tests/providers => providers/tests}/github/sensors/test_github.py (100%)
rename {tests/providers/common/compat/security => providers/tests/google}/__init__.py (100%)
rename {tests/providers/common/io => providers/tests/google/ads}/__init__.py (100%)
rename {tests/providers/common/io/assets => providers/tests/google/ads/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/ads/hooks/test_ads.py (100%)
rename {tests/providers/common/io => providers/tests/google/ads}/operators/__init__.py (100%)
rename {tests/providers => providers/tests}/google/ads/operators/test_ads.py (100%)
rename {tests/providers/common/io/xcom => providers/tests/google/ads/transfers}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/ads/transfers/test_ads_to_gcs.py (97%)
rename {tests/providers/common/sql => providers/tests/google/assets}/__init__.py (100%)
rename {tests/providers/google/datasets => providers/tests/google/assets}/test_bigquery.py (100%)
rename {tests/providers/common/sql/hooks => providers/tests/google/cloud}/__init__.py (100%)
rename {tests/providers/common/sql/operators => providers/tests/google/cloud/_internal_client}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/cloud/_internal_client/test_secret_manager_client.py (100%)
rename {tests/providers/common/sql/sensors => providers/tests/google/cloud/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_automl.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_bigquery.py (100%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_bigquery_dts.py (98%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_bigquery_system.py (94%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_bigtable.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_cloud_batch.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_cloud_build.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_cloud_composer.py (100%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_cloud_memorystore.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_cloud_run.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_cloud_sql.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_cloud_storage_transfer_service.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_cloud_storage_transfer_service_async.py (98%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_compute.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_compute_ssh.py (100%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_datacatalog.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_dataflow.py (100%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_dataform.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_datafusion.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_datapipeline.py (100%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_dataplex.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_dataprep.py (100%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_dataproc.py (100%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_dataproc_metastore.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_datastore.py (100%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_dlp.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_functions.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_gcs.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_gdm.py (100%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_kms.py (100%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_kms_system.py (96%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_kubernetes_engine.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_life_sciences.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_looker.py (100%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_mlengine.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_natural_language.py (98%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_os_login.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_pubsub.py (100%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_secret_manager.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_secret_manager_system.py (95%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_spanner.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_speech_to_text.py (97%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_stackdriver.py (100%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_tasks.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_text_to_speech.py (97%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_translate.py (98%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_video_intelligence.py (98%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_vision.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/test_workflows.py (100%)
rename {tests/providers/databricks/plugins => providers/tests/google/cloud/hooks/vertex_ai}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/cloud/hooks/vertex_ai/test_auto_ml.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/vertex_ai/test_custom_job.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/vertex_ai/test_dataset.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/vertex_ai/test_endpoint_service.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/vertex_ai/test_generative_model.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/vertex_ai/test_hyperparameter_tuning_job.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/vertex_ai/test_model_service.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/vertex_ai/test_pipeline_job.py (99%)
rename {tests/providers => providers/tests}/google/cloud/hooks/vertex_ai/test_prediction_service.py (98%)
rename {tests/providers/databricks/sensors => providers/tests/google/cloud/links}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/cloud/links/test_translate.py (100%)
rename {tests/providers/databricks/utils => providers/tests/google/cloud/log}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/cloud/log/test_gcs_task_handler.py (98%)
rename {tests/providers => providers/tests}/google/cloud/log/test_gcs_task_handler_system.py (84%)
rename {tests/providers => providers/tests}/google/cloud/log/test_stackdriver_task_handler.py (99%)
rename {tests/providers => providers/tests}/google/cloud/log/test_stackdriver_task_handler_system.py (88%)
rename {tests/providers/dbt => providers/tests/google/cloud/openlineage}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/cloud/openlineage/test_mixins.py (95%)
rename {tests/providers => providers/tests}/google/cloud/openlineage/test_utils.py (97%)
rename {tests/providers/dbt/cloud => providers/tests/google/cloud/operators}/__init__.py (100%)
rename {tests/providers/dbt/cloud/hooks => providers/tests/google/cloud/operators/source}/__init__.py (100%)
rename {tests/providers/dbt/cloud/operators => providers/tests/google/cloud/operators/source/source_prefix}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_automl.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_bigquery.py (99%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_bigquery_dts.py (98%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_bigtable.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_cloud_base.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_cloud_batch.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_cloud_build.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_cloud_composer.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_cloud_memorystore.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_cloud_run.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_cloud_sql.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_cloud_storage_transfer_service.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_compute.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_datacatalog.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_dataflow.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_dataform.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_datafusion.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_datapipeline.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_dataplex.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_dataprep.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_dataprep_system.py (91%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_dataproc.py (99%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_dataproc_metastore.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_datastore.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_datastore_system.py (89%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_dlp.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_functions.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_kubernetes_engine.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_life_sciences.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_looker.py (98%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_mlengine.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_natural_language.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_pubsub.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_spanner.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_speech_to_text.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_stackdriver.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_tasks.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_text_to_speech.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_translate.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_translate_speech.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_vertex_ai.py (99%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_video_intelligence.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_vision.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/test_workflows.py (100%)
rename {tests/providers/dbt/cloud/sensors => providers/tests/google/cloud/operators/vertex_ai}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/cloud/operators/vertex_ai/test_generative_model.py (100%)
rename {tests/providers/dbt/cloud/test_data => providers/tests/google/cloud/secrets}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/cloud/secrets/test_secret_manager.py (100%)
rename {tests/providers => providers/tests}/google/cloud/secrets/test_secret_manager_system.py (95%)
rename {tests/providers/dbt/cloud/triggers => providers/tests/google/cloud/sensors}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/cloud/sensors/test_bigquery.py (100%)
rename {tests/providers => providers/tests}/google/cloud/sensors/test_bigquery_dts.py (100%)
rename {tests/providers => providers/tests}/google/cloud/sensors/test_bigtable.py (100%)
rename {tests/providers => providers/tests}/google/cloud/sensors/test_cloud_composer.py (100%)
rename {tests/providers => providers/tests}/google/cloud/sensors/test_cloud_storage_transfer_service.py (100%)
rename {tests/providers => providers/tests}/google/cloud/sensors/test_dataflow.py (100%)
rename {tests/providers => providers/tests}/google/cloud/sensors/test_datafusion.py (100%)
rename {tests/providers => providers/tests}/google/cloud/sensors/test_dataplex.py (100%)
rename {tests/providers => providers/tests}/google/cloud/sensors/test_dataprep.py (100%)
rename {tests/providers => providers/tests}/google/cloud/sensors/test_dataproc.py (100%)
rename {tests/providers => providers/tests}/google/cloud/sensors/test_dataproc_metastore.py (100%)
rename {tests/providers => providers/tests}/google/cloud/sensors/test_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/sensors/test_looker.py (100%)
rename {tests/providers => providers/tests}/google/cloud/sensors/test_pubsub.py (100%)
rename {tests/providers => providers/tests}/google/cloud/sensors/test_tasks.py (100%)
rename {tests/providers => providers/tests}/google/cloud/sensors/test_workflows.py (100%)
rename {tests/providers/dbt/cloud/utils => providers/tests/google/cloud/transfers}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_adls_to_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_azure_blob_to_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_azure_fileshare_to_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_bigquery_to_bigquery.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_bigquery_to_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_bigquery_to_mssql.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_bigquery_to_mysql.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_bigquery_to_postgres.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_calendar_to_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_cassandra_to_gcs.py (91%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_facebook_ads_to_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_facebook_ads_to_gcs_system.py (93%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_gcs_to_bigquery.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_gcs_to_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_gcs_to_local.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_gcs_to_sftp.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_gdrive_to_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_gdrive_to_local.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_local_to_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_mssql_to_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_mysql_to_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_oracle_to_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_postgres_to_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_s3_to_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_salesforce_to_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_salesforce_to_gcs_system.py (84%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_sftp_to_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_sheets_to_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_sql_to_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/transfers/test_trino_to_gcs.py (100%)
rename {tests/providers/discord/notifications => providers/tests/google/cloud/triggers}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/cloud/triggers/test_bigquery.py (100%)
rename {tests/providers => providers/tests}/google/cloud/triggers/test_bigquery_dts.py (100%)
rename {tests/providers => providers/tests}/google/cloud/triggers/test_cloud_batch.py (100%)
rename {tests/providers => providers/tests}/google/cloud/triggers/test_cloud_build.py (100%)
rename {tests/providers => providers/tests}/google/cloud/triggers/test_cloud_composer.py (100%)
rename {tests/providers => providers/tests}/google/cloud/triggers/test_cloud_run.py (100%)
rename {tests/providers => providers/tests}/google/cloud/triggers/test_cloud_sql.py (100%)
rename {tests/providers => providers/tests}/google/cloud/triggers/test_cloud_storage_transfer_service.py (100%)
rename {tests/providers => providers/tests}/google/cloud/triggers/test_dataflow.py (100%)
rename {tests/providers => providers/tests}/google/cloud/triggers/test_datafusion.py (100%)
rename {tests/providers => providers/tests}/google/cloud/triggers/test_dataplex.py (100%)
rename {tests/providers => providers/tests}/google/cloud/triggers/test_dataproc.py (100%)
rename {tests/providers => providers/tests}/google/cloud/triggers/test_gcs.py (100%)
rename {tests/providers => providers/tests}/google/cloud/triggers/test_kubernetes_engine.py (100%)
rename {tests/providers => providers/tests}/google/cloud/triggers/test_mlengine.py (100%)
rename {tests/providers => providers/tests}/google/cloud/triggers/test_pubsub.py (100%)
rename {tests/providers => providers/tests}/google/cloud/triggers/test_vertex_ai.py (99%)
rename {tests/providers/ftp/hooks => providers/tests/google/cloud/utils}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/cloud/utils/airflow_util.py (100%)
rename {tests/providers => providers/tests}/google/cloud/utils/base_gcp_mock.py (100%)
rename {tests/providers => providers/tests}/google/cloud/utils/gcp_authenticator.py (98%)
rename {tests/providers => providers/tests}/google/cloud/utils/job_details.json (100%)
rename {tests/providers => providers/tests}/google/cloud/utils/out_table_details.json (100%)
rename {tests/providers => providers/tests}/google/cloud/utils/script_job_details.json (100%)
rename {tests/providers => providers/tests}/google/cloud/utils/table_details.json (100%)
rename {tests/providers => providers/tests}/google/cloud/utils/test_credentials_provider.py (98%)
rename {tests/providers => providers/tests}/google/cloud/utils/test_datafusion.py (100%)
rename {tests/providers => providers/tests}/google/cloud/utils/test_dataproc.py (100%)
rename {tests/providers => providers/tests}/google/cloud/utils/test_external_token_supplier.py (100%)
rename {tests/providers => providers/tests}/google/cloud/utils/test_field_sanitizer.py (100%)
rename {tests/providers => providers/tests}/google/cloud/utils/test_field_validator.py (100%)
rename {tests/providers => providers/tests}/google/cloud/utils/test_helpers.py (100%)
rename {tests/providers => providers/tests}/google/cloud/utils/test_mlengine_operator_utils.py (100%)
rename {tests/providers => providers/tests}/google/cloud/utils/test_mlengine_prediction_summary.py (91%)
rename {tests/providers/docker/decorators => providers/tests/google/common}/__init__.py (100%)
rename {tests/providers/elasticsearch/log => providers/tests/google/common/auth_backend}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/common/auth_backend/test_google_openid.py (95%)
rename {tests/providers/fab/auth_manager/api/auth => providers/tests/google/common/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/common/hooks/test_base_google.py (99%)
rename {tests/providers => providers/tests}/google/common/hooks/test_discovery_api.py (100%)
rename {tests/providers => providers/tests}/google/common/test_deprecated.py (100%)
rename {tests/providers/fab/auth_manager/api/auth/backend => providers/tests/google/common/utils}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/common/utils/test_id_token_credentials.py (100%)
rename {tests/providers/fab/auth_manager/api_endpoints => providers/tests/google/firebase}/__init__.py (100%)
rename {tests/providers/fab/auth_manager/cli_commands => providers/tests/google/firebase/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/firebase/hooks/test_firestore.py (99%)
rename {tests/providers/fab/auth_manager/decorators => providers/tests/google/firebase/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/firebase/operators/test_firestore.py (100%)
rename {tests/providers/ftp/sensors => providers/tests/google/leveldb}/__init__.py (100%)
rename {tests/providers/google/cloud/utils => providers/tests/google/leveldb/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/leveldb/hooks/test_leveldb.py (100%)
rename {tests/providers/google/leveldb => providers/tests/google/leveldb/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/leveldb/operators/test_leveldb.py (100%)
rename {tests/providers/fab/auth_manager/security_manager => providers/tests/google/marketing_platform}/__init__.py (100%)
rename {tests/providers/facebook => providers/tests/google/marketing_platform/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/marketing_platform/hooks/test_analytics.py (96%)
rename {tests/providers => providers/tests}/google/marketing_platform/hooks/test_analytics_admin.py (99%)
rename {tests/providers => providers/tests}/google/marketing_platform/hooks/test_campaign_manager.py (99%)
rename {tests/providers => providers/tests}/google/marketing_platform/hooks/test_display_video.py (99%)
rename {tests/providers => providers/tests}/google/marketing_platform/hooks/test_search_ads.py (99%)
rename {tests/providers/google/leveldb/hooks => providers/tests/google/marketing_platform/links}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/marketing_platform/links/test_analytics_admin.py (100%)
rename {tests/providers/facebook/ads => providers/tests/google/marketing_platform/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/marketing_platform/operators/test_analytics.py (100%)
rename {tests/providers => providers/tests}/google/marketing_platform/operators/test_analytics_admin.py (100%)
rename {tests/providers => providers/tests}/google/marketing_platform/operators/test_campaign_manager.py (100%)
rename {tests/providers => providers/tests}/google/marketing_platform/operators/test_display_video.py (100%)
rename {tests/providers => providers/tests}/google/marketing_platform/operators/test_display_video_system.py (91%)
rename {tests/providers => providers/tests}/google/marketing_platform/operators/test_search_ads.py (100%)
rename {tests/providers/facebook/ads/hooks => providers/tests/google/marketing_platform/sensors}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/marketing_platform/sensors/test_campaign_manager.py (100%)
rename {tests/providers => providers/tests}/google/marketing_platform/sensors/test_display_video.py (100%)
rename {tests/providers/google/leveldb/operators => providers/tests/google/suite}/__init__.py (100%)
rename {tests/providers/google/marketing_platform/links => providers/tests/google/suite/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/suite/hooks/test_calendar.py (98%)
rename {tests/providers => providers/tests}/google/suite/hooks/test_drive.py (99%)
rename {tests/providers => providers/tests}/google/suite/hooks/test_sheets.py (99%)
rename {tests/providers/google/suite => providers/tests/google/suite/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/suite/operators/test_sheets.py (100%)
rename {tests/providers/google/suite/hooks => providers/tests/google/suite/sensors}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/suite/sensors/test_drive.py (100%)
rename {tests/providers/ftp/operators => providers/tests/google/suite/transfers}/__init__.py (100%)
rename {tests/providers => providers/tests}/google/suite/transfers/test_gcs_to_gdrive.py (100%)
rename {tests/providers => providers/tests}/google/suite/transfers/test_gcs_to_sheets.py (100%)
rename {tests/providers => providers/tests}/google/suite/transfers/test_local_to_drive.py (100%)
rename {tests/providers => providers/tests}/google/suite/transfers/test_sql_to_sheets.py (100%)
rename {tests/providers => providers/tests}/google/test_go_module.py (100%)
rename {tests/providers/google/suite/operators => providers/tests/grpc}/__init__.py (100%)
rename {tests/providers/google/suite/sensors => providers/tests/grpc/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/grpc/hooks/test_grpc.py (100%)
rename {tests/providers/grpc => providers/tests/grpc/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/grpc/operators/test_grpc.py (100%)
rename {tests/providers/github => providers/tests/hashicorp}/__init__.py (100%)
rename {tests/providers/github/hooks => providers/tests/hashicorp/_internal_client}/__init__.py (100%)
rename {tests/providers => providers/tests}/hashicorp/_internal_client/test_vault_client.py (100%)
rename {tests/providers/github/operators => providers/tests/hashicorp/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/hashicorp/hooks/test_vault.py (99%)
rename {tests/providers/github/sensors => providers/tests/hashicorp/secrets}/__init__.py (100%)
rename {tests/providers => providers/tests}/hashicorp/secrets/test_vault.py (100%)
rename {tests/providers/grpc/hooks => providers/tests/http}/__init__.py (100%)
rename {tests/providers/grpc/operators => providers/tests/http/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/http/hooks/test_http.py (95%)
rename {tests/providers/http => providers/tests/http/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/http/operators/test_http.py (100%)
rename {tests/providers/http/hooks => providers/tests/http/sensors}/__init__.py (100%)
rename {tests/providers => providers/tests}/http/sensors/test_http.py (100%)
rename {tests/providers/http/operators => providers/tests/http/triggers}/__init__.py (100%)
rename {tests/providers => providers/tests}/http/triggers/test_http.py (100%)
rename {tests/providers/http/sensors => providers/tests/imap}/__init__.py (100%)
rename {tests/providers/http/triggers => providers/tests/imap/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/imap/hooks/test_imap.py (99%)
rename {tests/providers/imap => providers/tests/imap/sensors}/__init__.py (100%)
rename {tests/providers => providers/tests}/imap/sensors/test_imap_attachment.py (100%)
rename {tests/providers/google => providers/tests/influxdb}/__init__.py (100%)
rename {tests/providers/google/ads => providers/tests/influxdb/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/influxdb/hooks/test_influxdb.py (100%)
rename {tests/providers/google/ads/hooks => providers/tests/influxdb/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/influxdb/operators/test_influxdb.py (100%)
rename {tests/providers/google/ads/operators => providers/tests/integration}/__init__.py (100%)
rename {tests/providers/google/ads/transfers => providers/tests/integration/apache}/__init__.py (100%)
rename {tests/providers/google/cloud => providers/tests/integration/apache/cassandra}/__init__.py (100%)
rename {tests/providers/google/cloud/_internal_client => providers/tests/integration/apache/cassandra/hooks}/__init__.py (100%)
rename {tests/integration/providers => providers/tests/integration}/apache/cassandra/hooks/test_cassandra.py (100%)
rename {tests/providers/google/cloud/hooks => providers/tests/integration/apache/drill}/__init__.py (100%)
rename {tests/providers/google/cloud/hooks/vertex_ai => providers/tests/integration/apache/drill/hooks}/__init__.py (100%)
rename {tests/integration/providers => providers/tests/integration}/apache/drill/hooks/test_drill.py (100%)
rename {tests/providers/google/cloud/links => providers/tests/integration/apache/drill/operators}/__init__.py (100%)
rename {tests/providers/google/cloud/log => providers/tests/integration/apache/hive}/__init__.py (100%)
rename {tests/providers/google/cloud/openlineage => providers/tests/integration/apache/hive/transfers}/__init__.py (100%)
rename {tests/integration/providers => providers/tests/integration}/apache/hive/transfers/test_mssql_to_hive.py (100%)
rename {tests/providers/google/cloud/operators => providers/tests/integration/apache/kafka}/__init__.py (100%)
rename {tests/providers/google/cloud/operators/source => providers/tests/integration/apache/kafka/hooks}/__init__.py (100%)
rename {tests/integration/providers => providers/tests/integration}/apache/kafka/hooks/test_admin_client.py (100%)
rename {tests/integration/providers => providers/tests/integration}/apache/kafka/hooks/test_consumer.py (100%)
rename {tests/integration/providers => providers/tests/integration}/apache/kafka/hooks/test_producer.py (100%)
rename {tests/providers/google/cloud/operators/source/source_prefix => providers/tests/integration/apache/kafka/operators}/__init__.py (100%)
rename {tests/integration/providers => providers/tests/integration}/apache/kafka/operators/test_consume.py (100%)
rename {tests/integration/providers => providers/tests/integration}/apache/kafka/operators/test_produce.py (100%)
rename {tests/providers/google/cloud/operators/vertex_ai => providers/tests/integration/apache/kafka/sensors}/__init__.py (100%)
rename {tests/providers/google/cloud/secrets => providers/tests/integration/apache/kafka/triggers}/__init__.py (100%)
rename {tests/integration/providers => providers/tests/integration}/apache/kafka/triggers/test_await_message.py (100%)
rename {tests/providers/google/cloud/sensors => providers/tests/integration/apache/pinot}/__init__.py (100%)
rename {tests/providers/google/cloud/transfers => providers/tests/integration/apache/pinot/hooks}/__init__.py (100%)
rename {tests/integration/providers => providers/tests/integration}/apache/pinot/hooks/test_pinot.py (100%)
rename {tests/providers/google/cloud/triggers => providers/tests/integration/google}/__init__.py (100%)
rename {tests/providers/google/common => providers/tests/integration/google/cloud}/__init__.py (100%)
rename {tests/providers/google/common/auth_backend => providers/tests/integration/google/cloud/transfers}/__init__.py (100%)
rename {tests/integration/providers => providers/tests/integration}/google/cloud/transfers/test_bigquery_to_mssql.py (100%)
rename {tests/integration/providers => providers/tests/integration}/google/cloud/transfers/test_mssql_to_gcs.py (100%)
rename {tests/integration/providers => providers/tests/integration}/google/cloud/transfers/test_trino_to_gcs.py (100%)
rename {tests/providers/google/common/hooks => providers/tests/integration/microsoft}/__init__.py (100%)
rename {tests/providers/google/common/utils => providers/tests/integration/microsoft/mssql}/__init__.py (100%)
rename {tests/providers/google/datasets => providers/tests/integration/microsoft/mssql/hooks}/__init__.py (100%)
rename {tests/integration/providers => providers/tests/integration}/microsoft/mssql/hooks/test_mssql.py (100%)
rename {tests/providers/google/firebase => providers/tests/integration/mongo}/__init__.py (100%)
rename {tests/providers/google/firebase/hooks => providers/tests/integration/mongo/sensors}/__init__.py (100%)
rename {tests/integration/providers => providers/tests/integration}/mongo/sensors/test_mongo.py (100%)
rename {tests/providers/google/firebase/operators => providers/tests/integration/openlineage}/__init__.py (100%)
rename {tests/providers/google/marketing_platform => providers/tests/integration/openlineage/operators}/__init__.py (100%)
rename {tests/providers/google/marketing_platform/hooks => providers/tests/integration/qdrant}/__init__.py (100%)
rename {tests/providers/google/marketing_platform/operators => providers/tests/integration/qdrant/hooks}/__init__.py (100%)
rename {tests/integration/providers => providers/tests/integration}/qdrant/hooks/test_qdrant.py (100%)
rename {tests/providers/google/marketing_platform/sensors => providers/tests/integration/qdrant/operators}/__init__.py (100%)
rename {tests/integration/providers => providers/tests/integration}/qdrant/operators/test_qdrant_ingest.py (100%)
rename {tests/providers/google/suite/transfers => providers/tests/integration/redis}/__init__.py (100%)
rename {tests/providers/hashicorp => providers/tests/integration/redis/hooks}/__init__.py (100%)
rename {tests/integration/providers => providers/tests/integration}/redis/hooks/test_redis.py (100%)
rename {tests/providers/hashicorp/_internal_client => providers/tests/integration/redis/operators}/__init__.py (100%)
rename {tests/integration/providers => providers/tests/integration}/redis/operators/test_redis_publish.py (100%)
rename {tests/providers/hashicorp/hooks => providers/tests/integration/redis/sensors}/__init__.py (100%)
rename {tests/integration/providers => providers/tests/integration}/redis/sensors/test_redis_key.py (100%)
rename {tests/integration/providers => providers/tests/integration}/redis/sensors/test_redis_pub_sub.py (100%)
rename {tests/providers/hashicorp/secrets => providers/tests/integration/trino}/__init__.py (100%)
rename {tests/providers/influxdb => providers/tests/integration/trino/hooks}/__init__.py (100%)
rename {tests/integration/providers => providers/tests/integration}/trino/hooks/test_trino.py (100%)
rename {tests/providers/influxdb/hooks => providers/tests/integration/ydb}/__init__.py (100%)
rename {tests/providers/influxdb/operators => providers/tests/integration/ydb/hooks}/__init__.py (100%)
rename {tests/providers/jenkins/hooks => providers/tests/integration/ydb/operators}/__init__.py (100%)
rename {tests/integration/providers => providers/tests/integration}/ydb/operators/test_ydb.py (100%)
rename {tests/providers/imap/hooks => providers/tests/jdbc}/__init__.py (100%)
rename {tests/providers/imap/sensors => providers/tests/jdbc/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/jdbc/hooks/test_jdbc.py (97%)
rename {tests/providers/jdbc => providers/tests/jdbc/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/jdbc/operators/test_jdbc.py (100%)
rename {tests/providers/jdbc/hooks => providers/tests/jenkins}/__init__.py (100%)
rename {tests/providers/microsoft/azure/log => providers/tests/jenkins/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/jenkins/hooks/test_jenkins.py (100%)
rename {tests/providers/jdbc => providers/tests/jenkins}/operators/__init__.py (100%)
rename {tests/providers => providers/tests}/jenkins/operators/test_jenkins_job_trigger.py (85%)
rename {tests/providers/jenkins => providers/tests/jenkins/sensors}/__init__.py (100%)
rename {tests/providers => providers/tests}/jenkins/sensors/test_jenkins.py (100%)
rename {tests/providers/jenkins/operators => providers/tests/microsoft}/__init__.py (100%)
rename {tests/providers/jenkins/sensors => providers/tests/microsoft/azure}/__init__.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/base.py (91%)
rename {tests/providers/microsoft => providers/tests/microsoft/azure/fs}/__init__.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/fs/test_adls.py (100%)
rename {tests/providers/microsoft/azure => providers/tests/microsoft/azure/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/hooks/test_adx.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/hooks/test_asb.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/hooks/test_base_azure.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/hooks/test_batch.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/hooks/test_container_instance.py (95%)
rename {tests/providers => providers/tests}/microsoft/azure/hooks/test_container_registry.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/hooks/test_container_volume.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/hooks/test_cosmos.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/hooks/test_data_factory.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/hooks/test_data_lake.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/hooks/test_fileshare.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/hooks/test_msgraph.py (99%)
rename {tests/providers => providers/tests}/microsoft/azure/hooks/test_powerbi.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/hooks/test_synapse.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/hooks/test_synapse_pipeline.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/hooks/test_wasb.py (100%)
rename {tests/providers/microsoft/azure/resources => providers/tests/microsoft/azure/log}/__init__.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/log/test_wasb_task_handler.py (98%)
rename {tests/providers/microsoft/azure/fs => providers/tests/microsoft/azure/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/operators/test_adls_create.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/operators/test_adls_delete.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/operators/test_adls_list.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/operators/test_adx.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/operators/test_asb.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/operators/test_batch.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/operators/test_container_instances.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/operators/test_cosmos.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/operators/test_data_factory.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/operators/test_msgraph.py (98%)
rename {tests/providers => providers/tests}/microsoft/azure/operators/test_powerbi.py (97%)
rename {tests/providers => providers/tests}/microsoft/azure/operators/test_synapse.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/operators/test_wasb_delete_blob.py (100%)
rename {tests/providers/microsoft/azure/secrets => providers/tests/microsoft/azure/resources}/__init__.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/resources/dummy.pdf (100%)
rename {tests/providers => providers/tests}/microsoft/azure/resources/next_users.json (100%)
rename {tests/providers => providers/tests}/microsoft/azure/resources/status.json (100%)
rename {tests/providers => providers/tests}/microsoft/azure/resources/users.json (100%)
rename {tests/providers/microsoft/azure/transfers => providers/tests/microsoft/azure/secrets}/__init__.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/secrets/test_key_vault.py (100%)
rename {tests/providers/microsoft/azure/hooks => providers/tests/microsoft/azure/sensors}/__init__.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/sensors/test_cosmos.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/sensors/test_data_factory.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/sensors/test_msgraph.py (95%)
rename {tests/providers => providers/tests}/microsoft/azure/sensors/test_wasb.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/test_utils.py (100%)
rename {tests/providers/microsoft/azure/triggers => providers/tests/microsoft/azure/transfers}/__init__.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/transfers/test_local_to_adls.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/transfers/test_local_to_wasb.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/transfers/test_oracle_to_azure_data_lake.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/transfers/test_s3_to_wasb.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/transfers/test_sftp_to_wasb.py (100%)
rename {tests/providers/microsoft/mssql/operators => providers/tests/microsoft/azure/triggers}/__init__.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/triggers/test_data_factory.py (100%)
rename {tests/providers => providers/tests}/microsoft/azure/triggers/test_msgraph.py (98%)
rename {tests/providers => providers/tests}/microsoft/azure/triggers/test_powerbi.py (99%)
rename {tests/providers => providers/tests}/microsoft/azure/triggers/test_wasb.py (100%)
rename {tests/providers => providers/tests}/microsoft/conftest.py (100%)
rename {tests/providers/microsoft/azure/operators => providers/tests/microsoft/mssql}/__init__.py (100%)
rename {tests/providers/microsoft/azure/sensors => providers/tests/microsoft/mssql/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/microsoft/mssql/hooks/test_mssql.py (99%)
rename {tests/providers/mysql/assets => providers/tests/microsoft/mssql/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/microsoft/mssql/operators/test_mssql.py (100%)
rename {tests/providers/microsoft/mssql => providers/tests/microsoft/mssql/resources}/__init__.py (100%)
rename {tests/providers => providers/tests}/microsoft/mssql/resources/replace.sql (100%)
rename {tests/providers/microsoft/mssql/hooks => providers/tests/microsoft/psrp}/__init__.py (100%)
rename {tests/providers/microsoft/mssql/resources => providers/tests/microsoft/psrp/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/microsoft/psrp/hooks/test_psrp.py (96%)
rename {tests/providers/microsoft/psrp => providers/tests/microsoft/psrp/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/microsoft/psrp/operators/test_psrp.py (100%)
rename {tests/providers/microsoft/psrp/hooks => providers/tests/microsoft/winrm}/__init__.py (100%)
rename {tests/providers/microsoft/psrp/operators => providers/tests/microsoft/winrm/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/microsoft/winrm/hooks/test_winrm.py (100%)
rename {tests/providers/microsoft/winrm => providers/tests/microsoft/winrm/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/microsoft/winrm/operators/test_winrm.py (100%)
rename {tests/providers/microsoft/winrm/hooks => providers/tests/mongo}/__init__.py (100%)
rename {tests/providers/microsoft/winrm/operators => providers/tests/mongo/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/mongo/hooks/test_mongo.py (99%)
rename {tests/providers/mongo => providers/tests/mongo/sensors}/__init__.py (100%)
rename {tests/providers => providers/tests}/mongo/sensors/test_mongo.py (100%)
rename {tests/providers/mongo/hooks => providers/tests/mysql}/__init__.py (100%)
rename {tests/providers/mysql/transfers => providers/tests/mysql/assets}/__init__.py (100%)
rename {tests/providers => providers/tests}/mysql/assets/test_mysql.py (100%)
rename {tests/providers/mongo/sensors => providers/tests/mysql/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/mysql/hooks/test_mysql.py (99%)
rename {tests/providers => providers/tests}/mysql/hooks/test_mysql_connector_python.py (100%)
rename {tests/providers/mysql => providers/tests/mysql/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/mysql/operators/test_mysql.py (99%)
rename {tests/providers/odbc => providers/tests/mysql/transfers}/__init__.py (100%)
rename {tests/providers => providers/tests}/mysql/transfers/test_presto_to_mysql.py (100%)
rename {tests/providers => providers/tests}/mysql/transfers/test_s3_to_mysql.py (100%)
rename {tests/providers => providers/tests}/mysql/transfers/test_trino_to_mysql.py (100%)
rename {tests/providers => providers/tests}/mysql/transfers/test_vertica_to_mysql.py (100%)
rename {tests/providers/mysql/hooks => providers/tests/neo4j}/__init__.py (100%)
rename {tests/providers/mysql/operators => providers/tests/neo4j/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/neo4j/hooks/test_neo4j.py (100%)
rename {tests/providers/neo4j => providers/tests/neo4j/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/neo4j/operators/test_neo4j.py (100%)
rename {tests/providers/odbc/hooks => providers/tests/odbc}/__init__.py (100%)
rename {tests/providers/openai => providers/tests/odbc/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/odbc/hooks/test_odbc.py (99%)
rename {tests/providers/openai/hooks => providers/tests/openai}/__init__.py (100%)
rename {tests/providers/openai/operators => providers/tests/openai/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/openai/hooks/test_openai.py (100%)
rename {tests/providers/openai/triggers => providers/tests/openai/operators}/__init__.py (100%)
rename {tests/providers => providers/tests}/openai/operators/test_openai.py (100%)
rename {tests/providers => providers/tests}/openai/test_exceptions.py (100%)
rename {tests/providers/openlineage => providers/tests/openai/triggers}/__init__.py (100%)
rename {tests/providers => providers/tests}/openai/triggers/test_openai.py (100%)
rename {tests/providers/neo4j/hooks => providers/tests/openfaas}/__init__.py (100%)
rename {tests/providers/neo4j/operators => providers/tests/openfaas/hooks}/__init__.py (100%)
rename {tests/providers => providers/tests}/openfaas/hooks/test_openfaas.py (100%)
rename {tests/providers/openlineage/extractors => providers/tests/openlineage}/__init__.py (100%)
rename {tests/providers/openlineage/plugins => providers/tests/openlineage/extractors}/__init__.py (100%)
rename {tests/providers => providers/tests}/openlineage/extractors/test_base.py (99%)
rename {tests/providers => providers/tests}/openlineage/extractors/test_bash.py (98%)
rename {tests/providers => providers/tests}/openlineage/extractors/test_manager.py (99%)
rename {tests/providers => providers/tests}/openlineage/extractors/test_python.py (98%)
rename {tests/providers => providers/tests}/openlineage/log_config.py (100%)
rename {tests/providers/openlineage/plugins/openlineage_configs => providers/tests/openlineage/plugins}/__init__.py (100%)
rename {tests/providers/openlineage/utils => providers/tests/openlineage/plugins/openlineage_configs}/__init__.py (100%)
rename
{tests/providers => providers/tests}/openlineage/plugins/openlineage_configs/http.yaml (100%) rename {tests/providers => providers/tests}/openlineage/plugins/test_adapter.py (99%) rename {tests/providers => providers/tests}/openlineage/plugins/test_execution.py (98%) rename {tests/providers => providers/tests}/openlineage/plugins/test_facets.py (100%) rename {tests/providers => providers/tests}/openlineage/plugins/test_listener.py (99%) rename {tests/providers => providers/tests}/openlineage/plugins/test_macros.py (100%) rename {tests/providers => providers/tests}/openlineage/plugins/test_openlineage.py (97%) rename {tests/providers => providers/tests}/openlineage/plugins/test_utils.py (99%) rename {tests/providers => providers/tests}/openlineage/test_conf.py (99%) rename {tests/providers => providers/tests}/openlineage/test_sqlparser.py (100%) rename {tests/providers/opensearch => providers/tests/openlineage/utils}/__init__.py (100%) rename {tests/providers => providers/tests}/openlineage/utils/custom_facet_fixture.py (100%) rename {tests/providers => providers/tests}/openlineage/utils/test_selective_enable.py (100%) rename {tests/providers => providers/tests}/openlineage/utils/test_sql.py (100%) rename {tests/providers => providers/tests}/openlineage/utils/test_utils.py (96%) rename {tests/providers/opensearch/hooks => providers/tests/opensearch}/__init__.py (100%) rename {tests/providers => providers/tests}/opensearch/conftest.py (100%) rename {tests/providers/opensearch/log => providers/tests/opensearch/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/opensearch/hooks/test_opensearch.py (100%) rename {tests/providers/opensearch/operators => providers/tests/opensearch/log}/__init__.py (100%) rename {tests/providers => providers/tests}/opensearch/log/test_os_json_formatter.py (100%) rename {tests/providers => providers/tests}/opensearch/log/test_os_response.py (100%) rename {tests/providers => providers/tests}/opensearch/log/test_os_task_handler.py (99%) rename {tests/providers/opsgenie/notifications => providers/tests/opensearch/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/opensearch/operators/test_opensearch.py (100%) rename {tests/providers/openfaas => providers/tests/opsgenie}/__init__.py (100%) rename {tests/providers/openfaas => providers/tests/opsgenie}/hooks/__init__.py (100%) rename {tests/providers => providers/tests}/opsgenie/hooks/test_opsgenie.py (100%) rename {tests/providers/opsgenie/typing => providers/tests/opsgenie/notifications}/__init__.py (100%) rename {tests/providers => providers/tests}/opsgenie/notifications/test_opsgenie.py (100%) rename {tests/providers/opsgenie => providers/tests/opsgenie/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/opsgenie/operators/test_opsgenie.py (100%) rename {tests/providers/oracle/operators => providers/tests/opsgenie/typing}/__init__.py (100%) rename {tests/providers => providers/tests}/opsgenie/typing/test_opsgenie.py (100%) rename {tests/providers/opsgenie/hooks => providers/tests/oracle}/__init__.py (100%) rename {tests/providers/opsgenie/operators => providers/tests/oracle/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/oracle/hooks/test_oracle.py (100%) rename {tests/providers/pagerduty/notifications => providers/tests/oracle/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/oracle/operators/test_oracle.py (100%) rename {tests/providers/oracle => providers/tests/oracle/transfers}/__init__.py (100%) rename 
{tests/providers => providers/tests}/oracle/transfers/test_oracle_to_oracle.py (100%) rename {tests/providers/oracle/hooks => providers/tests/pagerduty}/__init__.py (100%) rename {tests/providers/oracle/transfers => providers/tests/pagerduty/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/pagerduty/hooks/test_pagerduty.py (100%) rename {tests/providers => providers/tests}/pagerduty/hooks/test_pagerduty_events.py (100%) rename {tests/providers/papermill/hooks => providers/tests/pagerduty/notifications}/__init__.py (100%) rename {tests/providers => providers/tests}/pagerduty/notifications/test_pagerduty.py (100%) rename {tests/providers/pagerduty => providers/tests/papermill}/__init__.py (100%) rename {tests/providers/pgvector => providers/tests/papermill/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/papermill/hooks/test_kernel.py (100%) rename {tests/providers/pagerduty/hooks => providers/tests/papermill/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/papermill/operators/test_papermill.py (100%) rename {tests/providers/pgvector/hooks => providers/tests/pgvector}/__init__.py (100%) rename {tests/providers/pgvector/operators => providers/tests/pgvector/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/pgvector/hooks/test_pgvector.py (100%) rename {tests/providers/pinecone => providers/tests/pgvector/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/pgvector/operators/test_pgvector.py (100%) rename {tests/providers/pinecone/hooks => providers/tests/pinecone}/__init__.py (100%) rename {tests/providers/pinecone/operators => providers/tests/pinecone/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/pinecone/hooks/test_pinecone.py (100%) rename {tests/providers/postgres/assets => providers/tests/pinecone/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/pinecone/operators/test_pinecone.py (100%) rename {tests/providers/papermill => providers/tests/postgres}/__init__.py (100%) rename {tests/providers/qdrant => providers/tests/postgres/assets}/__init__.py (100%) rename {tests/providers => providers/tests}/postgres/assets/test_postgres.py (100%) rename {tests/providers/papermill/operators => providers/tests/postgres/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/postgres/hooks/test_postgres.py (100%) rename {tests/providers/postgres => providers/tests/postgres/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/postgres/operators/test_postgres.py (100%) rename {tests/providers/postgres/hooks => providers/tests/presto}/__init__.py (100%) rename {tests/providers/postgres/operators => providers/tests/presto/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/presto/hooks/test_presto.py (100%) rename {tests/providers/presto => providers/tests/presto/transfers}/__init__.py (100%) rename {tests/providers => providers/tests}/presto/transfers/test_gcs_to_presto.py (100%) rename {tests/providers/qdrant/hooks => providers/tests/qdrant}/__init__.py (100%) rename {tests/providers/qdrant/operators => providers/tests/qdrant/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/qdrant/hooks/test_qdrant.py (100%) rename {tests/providers/salesforce => providers/tests/qdrant}/operators/__init__.py (100%) rename {tests/providers => providers/tests}/qdrant/operators/test_qdrant.py (100%) rename {tests/providers/presto/hooks => providers/tests/redis}/__init__.py (100%) rename 
{tests/providers/presto/transfers => providers/tests/redis/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/redis/hooks/test_redis.py (100%) rename {tests/providers/redis => providers/tests/redis/log}/__init__.py (100%) rename {tests/providers => providers/tests}/redis/log/test_redis_task_handler.py (98%) rename {tests/providers/redis/hooks => providers/tests/redis/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/redis/operators/test_redis_publish.py (100%) rename {tests/providers/redis/log => providers/tests/redis/sensors}/__init__.py (100%) rename {tests/providers => providers/tests}/redis/sensors/test_redis_key.py (100%) rename {tests/providers => providers/tests}/redis/sensors/test_redis_pub_sub.py (100%) rename {tests/providers/redis/operators => providers/tests/salesforce}/__init__.py (100%) rename {tests/providers/redis/sensors => providers/tests/salesforce/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/salesforce/hooks/test_salesforce.py (100%) rename {tests/providers/sendgrid => providers/tests/salesforce/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/salesforce/operators/test_bulk.py (100%) rename {tests/providers => providers/tests}/salesforce/operators/test_salesforce_apex_rest.py (100%) rename {tests/providers/salesforce => providers/tests/samba}/__init__.py (100%) rename {tests/providers/salesforce => providers/tests/samba}/hooks/__init__.py (100%) rename {tests/providers => providers/tests}/samba/hooks/test_samba.py (100%) rename {tests/providers/samba => providers/tests/samba/transfers}/__init__.py (100%) rename {tests/providers => providers/tests}/samba/transfers/test_gcs_to_samba.py (100%) rename {tests/providers/samba/hooks => providers/tests/segment}/__init__.py (100%) rename {tests/providers/samba/transfers => providers/tests/segment/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/segment/hooks/test_segment.py (100%) rename {tests/providers/segment => providers/tests/segment/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/segment/operators/test_segment_track_event.py (100%) rename {tests/providers/sendgrid/utils => providers/tests/sendgrid}/__init__.py (100%) rename {tests/providers/sftp => providers/tests/sendgrid/utils}/__init__.py (100%) rename {tests/providers => providers/tests}/sendgrid/utils/test_emailer.py (100%) rename {tests/providers/sftp/decorators => providers/tests/sftp}/__init__.py (100%) rename {tests/providers/sftp/decorators/sensors => providers/tests/sftp/decorators}/__init__.py (100%) rename {tests/providers/sftp/hooks => providers/tests/sftp/decorators/sensors}/__init__.py (100%) rename {tests/providers => providers/tests}/sftp/decorators/sensors/test_sftp.py (100%) rename {tests/providers/sftp/operators => providers/tests/sftp/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/sftp/hooks/test_sftp.py (99%) rename {tests/providers/sftp/sensors => providers/tests/sftp/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/sftp/operators/test_sftp.py (97%) rename {tests/providers/sftp/triggers => providers/tests/sftp/sensors}/__init__.py (100%) rename {tests/providers => providers/tests}/sftp/sensors/test_sftp.py (100%) rename {tests/providers/slack/notifications => providers/tests/sftp/triggers}/__init__.py (100%) rename {tests/providers => providers/tests}/sftp/triggers/test_sftp.py (100%) rename {tests/providers/segment/hooks => providers/tests/singularity}/__init__.py 
(100%) rename {tests/providers/segment => providers/tests/singularity}/operators/__init__.py (100%) rename {tests/providers => providers/tests}/singularity/operators/test_singularity.py (100%) rename {tests/providers/singularity => providers/tests/slack}/__init__.py (100%) rename {tests/providers/singularity/operators => providers/tests/slack/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/slack/hooks/test_slack.py (100%) rename {tests/providers => providers/tests}/slack/hooks/test_slack_webhook.py (100%) rename {tests/providers/slack/utils => providers/tests/slack/notifications}/__init__.py (100%) rename {tests/providers => providers/tests}/slack/notifications/test_slack.py (100%) rename {tests/providers => providers/tests}/slack/notifications/test_slack_webhook.py (100%) rename {tests/providers/slack => providers/tests/slack/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/slack/operators/test_slack.py (98%) rename {tests/providers => providers/tests}/slack/operators/test_slack_webhook.py (100%) rename {tests/providers/slack/hooks => providers/tests/slack/transfers}/__init__.py (100%) rename {tests/providers => providers/tests}/slack/transfers/conftest.py (100%) rename {tests/providers => providers/tests}/slack/transfers/test_base_sql_to_slack.py (100%) rename {tests/providers => providers/tests}/slack/transfers/test_sql_to_slack.py (100%) rename {tests/providers => providers/tests}/slack/transfers/test_sql_to_slack_webhook.py (96%) rename {tests/providers/smtp/notifications => providers/tests/slack/utils}/__init__.py (100%) rename {tests/providers => providers/tests}/slack/utils/test_utils.py (100%) rename {tests/providers/slack/operators => providers/tests/smtp}/__init__.py (100%) rename {tests/providers/slack/transfers => providers/tests/smtp/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/smtp/hooks/test_smtp.py (99%) rename {tests/providers/snowflake => providers/tests/smtp/notifications}/__init__.py (100%) rename {tests/providers => providers/tests}/smtp/notifications/test_smtp.py (96%) rename {tests/providers/smtp => providers/tests/smtp/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/smtp/operators/test_smtp.py (100%) rename {tests/providers/snowflake/decorators => providers/tests/snowflake}/__init__.py (100%) rename {tests/providers/snowflake/hooks => providers/tests/snowflake/decorators}/__init__.py (100%) rename {tests/providers => providers/tests}/snowflake/decorators/test_snowpark.py (100%) rename {tests/providers/snowflake/operators => providers/tests/snowflake/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/snowflake/hooks/test_snowflake.py (93%) rename {tests/providers => providers/tests}/snowflake/hooks/test_snowflake_sql_api.py (97%) rename {tests/providers => providers/tests}/snowflake/hooks/test_sql.py (100%) rename {tests/providers/snowflake/transfers => providers/tests/snowflake/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/snowflake/operators/test_snowflake.py (100%) rename {tests/providers => providers/tests}/snowflake/operators/test_snowflake_sql.py (100%) rename {tests/providers => providers/tests}/snowflake/operators/test_snowpark.py (100%) rename {tests/providers/snowflake/triggers => providers/tests/snowflake/transfers}/__init__.py (100%) rename {tests/providers => providers/tests}/snowflake/transfers/test_copy_into_snowflake.py (100%) rename {tests/providers/snowflake/utils => 
providers/tests/snowflake/triggers}/__init__.py (100%) rename {tests/providers => providers/tests}/snowflake/triggers/test_snowflake.py (100%) rename {tests/providers/standard => providers/tests/snowflake/utils}/__init__.py (100%) rename {tests/providers => providers/tests}/snowflake/utils/test_common.py (100%) rename {tests/providers => providers/tests}/snowflake/utils/test_openlineage.py (100%) rename {tests/providers => providers/tests}/snowflake/utils/test_snowpark.py (100%) rename {tests/providers => providers/tests}/snowflake/utils/test_sql_api_generate_jwt.py (100%) rename {tests/providers/smtp/hooks => providers/tests/sqlite}/__init__.py (100%) rename {tests/providers/smtp/operators => providers/tests/sqlite/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/sqlite/hooks/test_sqlite.py (100%) rename {tests/providers/sqlite => providers/tests/sqlite/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/sqlite/operators/test_sqlite.py (100%) rename {tests/providers/sqlite/hooks => providers/tests/ssh}/__init__.py (100%) rename {tests/providers/sqlite/operators => providers/tests/ssh/hooks}/__init__.py (100%) rename {tests/providers => providers/tests}/ssh/hooks/test_ssh.py (99%) rename {tests/providers/ssh => providers/tests/ssh/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/ssh/operators/test_ssh.py (99%) rename {tests/providers/standard/operators => providers/tests/standard}/__init__.py (100%) rename {tests/providers/standard/sensors => providers/tests/standard/operators}/__init__.py (100%) rename {tests/providers => providers/tests}/standard/operators/test_bash.py (99%) rename {tests/providers => providers/tests}/standard/operators/test_datetime.py (99%) rename {tests/providers => providers/tests}/standard/operators/test_weekday.py (99%) rename {tests/providers/tableau => providers/tests/standard/sensors}/__init__.py (100%) rename {tests/providers => providers/tests}/standard/sensors/test_bash.py (100%) rename {tests/providers => providers/tests}/standard/sensors/test_date_time.py (100%) rename {tests/providers => providers/tests}/standard/sensors/test_time.py (100%) rename {tests/providers => providers/tests}/standard/sensors/test_time_delta.py (100%) rename {tests/providers => providers/tests}/standard/sensors/test_weekday.py (99%) rename {tests/providers/tableau/operators => providers/tests/system}/__init__.py (100%) rename {tests/providers/tableau/sensors => providers/tests/system/airbyte}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/airbyte/example_airbyte_trigger_job.py (96%) rename {tests/providers/teradata => providers/tests/system/alibaba}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/alibaba/example_adb_spark_batch.py (93%) rename {tests/system/providers => providers/tests/system}/alibaba/example_adb_spark_sql.py (93%) rename {tests/system/providers => providers/tests/system}/alibaba/example_oss_bucket.py (93%) rename {tests/system/providers => providers/tests/system}/alibaba/example_oss_object.py (94%) rename {tests/system/providers => providers/tests/system}/amazon/CONTRIBUTING.md (99%) rename {tests/system/providers => providers/tests/system}/amazon/README.md (100%) rename {tests/providers/ssh/hooks => providers/tests/system/amazon}/__init__.py (100%) rename {tests/providers/ssh/operators => providers/tests/system/amazon/aws}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_appflow.py (94%) 
rename {tests/system/providers => providers/tests/system}/amazon/aws/example_appflow_run.py (97%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_athena.py (96%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_azure_blob_to_s3.py (93%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_batch.py (97%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_bedrock.py (97%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_bedrock_retrieve_and_generate.py (99%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_cloudformation.py (95%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_comprehend.py (95%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_comprehend_document_classifier.py (97%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_datasync.py (97%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_dms.py (98%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_dynamodb.py (95%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_dynamodb_to_s3.py (97%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_ec2.py (97%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_ecs.py (97%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_ecs_fargate.py (96%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_eks_templated.py (96%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_eks_with_fargate_in_one_step.py (95%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_eks_with_fargate_profile.py (95%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_eks_with_nodegroup_in_one_step.py (95%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_eks_with_nodegroups.py (96%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_emr.py (97%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_emr_eks.py (98%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_emr_notebook_execution.py (95%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_emr_serverless.py (96%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_eventbridge.py (95%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_ftp_to_s3.py (92%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_gcs_to_s3.py (95%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_glacier_to_gcs.py (95%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_glue.py (97%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_glue_data_quality.py (97%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_glue_data_quality_with_recommendation.py (97%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_glue_databrew.py (95%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_google_api_sheets_to_s3.py (94%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_google_api_youtube_to_s3.py (97%) rename 
{tests/system/providers => providers/tests/system}/amazon/aws/example_hive_to_dynamodb.py (96%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_http_to_s3.py (94%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_imap_attachment_to_s3.py (94%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_kinesis_analytics.py (97%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_lambda.py (95%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_local_to_s3.py (93%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_mongo_to_s3.py (93%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_neptune.py (93%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_quicksight.py (97%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_rds_event.py (95%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_rds_export.py (96%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_rds_instance.py (95%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_rds_snapshot.py (96%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_redshift.py (97%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_redshift_s3_transfers.py (98%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_s3.py (98%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_s3_to_dynamodb.py (96%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_s3_to_ftp.py (92%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_s3_to_sftp.py (92%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_s3_to_sql.py (97%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_sagemaker.py (99%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_sagemaker_endpoint.py (97%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_sagemaker_notebook.py (94%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_sagemaker_pipeline.py (97%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_salesforce_to_s3.py (93%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_sftp_to_s3.py (92%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_sns.py (92%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_sql_to_s3.py (96%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_sqs.py (94%) rename {tests/system/providers => providers/tests/system}/amazon/aws/example_step_functions.py (95%) rename {tests/providers/trino/assets => providers/tests/system/amazon/aws/tests}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/amazon/aws/tests/test_aws_auth_manager.py (91%) rename {tests/system/providers => providers/tests/system}/amazon/aws/utils/__init__.py (99%) rename {tests/system/providers => providers/tests/system}/amazon/aws/utils/ec2.py (100%) rename {tests/system/providers => providers/tests/system}/amazon/aws/utils/k8s.py (100%) rename {tests/providers/trino/operators => providers/tests/system/apache}/__init__.py (100%) rename {tests/providers/tableau/hooks 
=> providers/tests/system/apache/beam}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/apache/beam/example_beam.py (95%) rename {tests/system/providers => providers/tests/system}/apache/beam/example_beam_java_flink.py (94%) rename {tests/system/providers => providers/tests/system}/apache/beam/example_beam_java_spark.py (94%) rename {tests/system/providers => providers/tests/system}/apache/beam/example_go.py (96%) rename {tests/system/providers => providers/tests/system}/apache/beam/example_go_dataflow.py (95%) rename {tests/system/providers => providers/tests/system}/apache/beam/example_java_dataflow.py (95%) rename {tests/system/providers => providers/tests/system}/apache/beam/example_python.py (97%) rename {tests/system/providers => providers/tests/system}/apache/beam/example_python_async.py (97%) rename {tests/system/providers => providers/tests/system}/apache/beam/example_python_dataflow.py (96%) rename {tests/system/providers => providers/tests/system}/apache/beam/utils.py (100%) rename {tests/providers/weaviate => providers/tests/system/apache/cassandra}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/apache/cassandra/example_cassandra_dag.py (96%) rename {tests/providers/weaviate/hooks => providers/tests/system/apache/drill}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/apache/drill/example_drill_dag.py (95%) rename {tests/providers/weaviate/operators => providers/tests/system/apache/druid}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/apache/druid/example_druid_dag.py (96%) rename {tests/providers/telegram => providers/tests/system/apache/hive}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/apache/hive/example_twitter_README.md (100%) rename {tests/system/providers => providers/tests/system}/apache/hive/example_twitter_dag.py (97%) rename {tests/providers/telegram/hooks => providers/tests/system/apache/iceberg}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/apache/iceberg/example_iceberg.py (95%) rename {tests/providers/yandex => providers/tests/system/apache/kafka}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/apache/kafka/example_dag_event_listener.py (97%) rename {tests/system/providers => providers/tests/system}/apache/kafka/example_dag_hello_kafka.py (98%) rename {tests/providers/yandex/hooks => providers/tests/system/apache/kylin}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/apache/kylin/example_kylin_dag.py (96%) rename {tests/providers/yandex/links => providers/tests/system/apache/livy}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/apache/livy/example_livy.py (95%) rename {tests/providers/yandex/operators => providers/tests/system/apache/pig}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/apache/pig/example_pig.py (95%) rename {tests/providers/yandex/secrets => providers/tests/system/apache/pinot}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/apache/pinot/example_pinot_dag.py (96%) rename {tests/providers/yandex/utils => providers/tests/system/apache/spark}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/apache/spark/example_pyspark.py (96%) rename {tests/system/providers => providers/tests/system}/apache/spark/example_spark_dag.py (97%) rename {tests/providers/ydb => providers/tests/system/asana}/__init__.py (100%) 
rename {tests/system/providers => providers/tests/system}/asana/example_asana.py (97%) rename {tests/providers/ydb/hooks => providers/tests/system/cncf}/__init__.py (100%) rename {tests/providers/ydb/operators => providers/tests/system/cncf/kubernetes}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/cncf/kubernetes/example_kubernetes.py (97%) rename {tests/system/providers => providers/tests/system}/cncf/kubernetes/example_kubernetes_async.py (98%) rename {tests/system/providers => providers/tests/system}/cncf/kubernetes/example_kubernetes_decorator.py (97%) rename {tests/system/providers => providers/tests/system}/cncf/kubernetes/example_kubernetes_job.py (95%) rename {tests/system/providers => providers/tests/system}/cncf/kubernetes/example_kubernetes_resource.py (94%) rename {tests/system/providers => providers/tests/system}/cncf/kubernetes/example_spark_kubernetes.py (95%) rename {tests/system/providers => providers/tests/system}/cncf/kubernetes/example_spark_kubernetes_spark_pi.yaml (100%) rename {tests/system/providers => providers/tests/system}/cncf/kubernetes/spark_job_template.yaml (100%) rename {tests/providers/ydb/utils => providers/tests/system/cohere}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/cohere/example_cohere_embedding_operator.py (96%) rename {tests/providers/zendesk => providers/tests/system/common}/__init__.py (100%) rename {tests/providers/zendesk/hooks => providers/tests/system/common/io}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/common/io/example_file_transfer_local_to_s3.py (94%) rename {tests/system/providers/airbyte => providers/tests/system/common/sql}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/common/sql/example_sql_column_table_check.py (97%) rename {tests/system/providers => providers/tests/system}/common/sql/example_sql_execute_query.py (96%) rename {tests/system/providers/alibaba => providers/tests/system/databricks}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/databricks/example_databricks.py (98%) rename {tests/system/providers => providers/tests/system}/databricks/example_databricks_repos.py (95%) rename {tests/system/providers => providers/tests/system}/databricks/example_databricks_sensors.py (96%) rename {tests/system/providers => providers/tests/system}/databricks/example_databricks_sql.py (97%) rename {tests/system/providers => providers/tests/system}/databricks/example_databricks_workflow.py (97%) rename {tests/system/providers/amazon/aws/tests => providers/tests/system/dbt}/__init__.py (100%) rename {tests/system/providers/apache/cassandra => providers/tests/system/dbt/cloud}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/dbt/cloud/example_dbt_cloud.py (94%) rename {tests/system/providers/apache/drill => providers/tests/system/dingding}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/dingding/example_dingding.py (98%) rename {tests/system/providers/apache/druid => providers/tests/system/docker}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/docker/example_docker.py (96%) rename {tests/system/providers => providers/tests/system}/docker/example_docker_copy_data.py (97%) rename {tests/system/providers => providers/tests/system}/docker/example_docker_swarm.py (95%) rename {tests/system/providers => providers/tests/system}/docker/example_taskflow_api_docker_virtualenv.py (97%) rename 
{tests/system/providers/apache/kafka => providers/tests/system/elasticsearch}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/elasticsearch/example_elasticsearch_query.py (97%) rename {tests/system/providers/apache/kylin => providers/tests/system/ftp}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/ftp/example_ftp.py (95%) rename {tests/system/providers/apache/livy => providers/tests/system/github}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/github/example_github.py (97%) rename {tests/system/providers => providers/tests/system}/google/README.md (100%) rename {tests/system/providers => providers/tests/system}/google/__init__.py (100%) rename {tests/system/providers/apache/pig => providers/tests/system/google/ads}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/ads/example_ads.py (95%) rename {tests/system/providers/apache/pinot => providers/tests/system/google/cloud}/__init__.py (100%) rename {tests/system/providers/apache/spark => providers/tests/system/google/cloud/automl}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/automl/example_automl_dataset.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/automl/example_automl_translation.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/automl/example_automl_video_classification.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/automl/example_automl_video_tracking.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/automl/example_automl_vision_classification.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/automl/example_automl_vision_object_detection.py (97%) rename {tests/system/providers/asana => providers/tests/system/google/cloud/automl/resources}/__init__.py (100%) rename {tests/system/providers/cncf => providers/tests/system/google/cloud/azure}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/azure/example_azure_blob_to_gcs.py (94%) rename {tests/system/providers => providers/tests/system}/google/cloud/azure/example_azure_fileshare_to_gcs.py (93%) rename {tests/system/providers/cncf/kubernetes => providers/tests/system/google/cloud/bigquery}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/bigquery/example_bigquery_dataset.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/bigquery/example_bigquery_dts.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/bigquery/example_bigquery_operations.py (96%) rename {tests/system/providers => providers/tests/system}/google/cloud/bigquery/example_bigquery_operations_location.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/bigquery/example_bigquery_queries.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/bigquery/example_bigquery_queries_async.py (98%) rename {tests/system/providers => providers/tests/system}/google/cloud/bigquery/example_bigquery_sensors.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/bigquery/example_bigquery_tables.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/bigquery/example_bigquery_to_bigquery.py (96%) rename {tests/system/providers => 
providers/tests/system}/google/cloud/bigquery/example_bigquery_to_gcs.py (94%) rename {tests/system/providers => providers/tests/system}/google/cloud/bigquery/example_bigquery_to_gcs_async.py (94%) rename {tests/system/providers => providers/tests/system}/google/cloud/bigquery/example_bigquery_to_mssql.py (98%) rename {tests/system/providers => providers/tests/system}/google/cloud/bigquery/example_bigquery_to_mysql.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/bigquery/example_bigquery_to_postgres.py (98%) rename {tests/system/providers => providers/tests/system}/google/cloud/bigquery/example_bigquery_transfer.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/bigquery/example_bigquery_value_check.py (95%) rename {tests/system/providers/cohere => providers/tests/system/google/cloud/bigquery/resources}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/bigquery/resources/example_bigquery_query.sql (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/bigquery/resources/update_table_schema.json (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/bigquery/resources/us-states.csv (100%) rename {tests/system/providers/common => providers/tests/system/google/cloud/bigtable}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/bigtable/example_bigtable.py (97%) rename {tests/system/providers/common/io => providers/tests/system/google/cloud/cloud_batch}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/cloud_batch/example_cloud_batch.py (96%) rename {tests/system/providers/common/sql => providers/tests/system/google/cloud/cloud_build}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/cloud_build/example_cloud_build.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/cloud_build/example_cloud_build_trigger.py (96%) rename {tests/system/providers/databricks => providers/tests/system/google/cloud/cloud_build/resources}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/cloud_build/resources/example_cloud_build.yaml (100%) rename {tests/system/providers/dbt/cloud => providers/tests/system/google/cloud/cloud_functions}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/cloud_functions/example_functions.py (95%) rename {tests/system/providers/dingding => providers/tests/system/google/cloud/cloud_memorystore}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py (98%) rename {tests/system/providers => providers/tests/system}/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py (97%) rename {tests/system/providers/docker => providers/tests/system/google/cloud/cloud_run}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/cloud_run/example_cloud_run.py (98%) rename {tests/system/providers => providers/tests/system}/google/cloud/cloud_run/example_cloud_run_service.py (95%) rename {tests/system/providers/elasticsearch => providers/tests/system/google/cloud/cloud_sql}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/cloud_sql/example_cloud_sql.py (98%) rename {tests/system/providers => providers/tests/system}/google/cloud/cloud_sql/example_cloud_sql_query.py 
(98%) rename {tests/system/providers => providers/tests/system}/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py (98%) rename {tests/system/providers/ftp => providers/tests/system/google/cloud/composer}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/composer/example_cloud_composer.py (98%) rename {tests/system/providers/github => providers/tests/system/google/cloud/compute}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/compute/example_compute.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/compute/example_compute_igm.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/compute/example_compute_ssh.py (96%) rename {tests/system/providers => providers/tests/system}/google/cloud/compute/example_compute_ssh_os_login.py (96%) rename {tests/system/providers => providers/tests/system}/google/cloud/compute/example_compute_ssh_parallel.py (96%) rename {tests/system/providers/google/ads => providers/tests/system/google/cloud/data_loss_prevention}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py (96%) rename {tests/system/providers => providers/tests/system}/google/cloud/data_loss_prevention/example_dlp_info_types.py (96%) rename {tests/system/providers => providers/tests/system}/google/cloud/data_loss_prevention/example_dlp_inspect_template.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/data_loss_prevention/example_dlp_job.py (94%) rename {tests/system/providers => providers/tests/system}/google/cloud/data_loss_prevention/example_dlp_job_trigger.py (94%) rename {tests/system/providers/google/cloud => providers/tests/system/google/cloud/data_loss_prevention/resources}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/data_loss_prevention/resources/dictionary.txt (100%) rename {tests/system/providers/google/cloud/automl => providers/tests/system/google/cloud/dataflow}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataflow/example_dataflow_go.py (98%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataflow/example_dataflow_native_java.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataflow/example_dataflow_native_python.py (96%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataflow/example_dataflow_native_python_async.py (98%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataflow/example_dataflow_pipeline.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataflow/example_dataflow_sensors_deferrable.py (98%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataflow/example_dataflow_sql.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataflow/example_dataflow_streaming_python.py (96%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataflow/example_dataflow_template.py (96%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataflow/example_dataflow_yaml.py (96%) rename {tests/system/providers/google/cloud/automl => providers/tests/system/google/cloud/dataflow}/resources/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataflow/resources/input.csv (100%) rename 
{tests/system/providers => providers/tests/system}/google/cloud/dataflow/resources/schema.json (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataflow/resources/text.txt (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataflow/resources/wordcount.go (100%) rename {tests/system/providers/google/cloud/azure => providers/tests/system/google/cloud/dataform}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataform/example_dataform.py (98%) rename {tests/system/providers/google/cloud/bigquery => providers/tests/system/google/cloud/datafusion}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/datafusion/example_datafusion.py (98%) rename {tests/system/providers/google/cloud/bigquery/resources => providers/tests/system/google/cloud/datapipelines}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/datapipelines/example_datapipeline.py (96%) rename {tests/system/providers/google/cloud/bigtable => providers/tests/system/google/cloud/dataplex}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataplex/example_dataplex.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataplex/example_dataplex_dp.py (98%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataplex/example_dataplex_dq.py (98%) rename {tests/system/providers/google/cloud/cloud_batch => providers/tests/system/google/cloud/dataprep}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataprep/example_dataprep.py (98%) rename {tests/system/providers/google/cloud/cloud_build => providers/tests/system/google/cloud/dataproc}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_batch.py (96%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_batch_deferrable.py (93%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_batch_persistent.py (96%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_cluster_deferrable.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_cluster_diagnose.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_cluster_generator.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_cluster_start_stop.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_cluster_update.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_flink.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_gke.py (96%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_hadoop.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_hive.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_pig.py (94%) rename {tests/system/providers => 
providers/tests/system}/google/cloud/dataproc/example_dataproc_presto.py (94%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_pyspark.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_spark.py (94%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_spark_async.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_spark_deferrable.py (94%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_spark_sql.py (94%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_sparkr.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_trino.py (94%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_workflow.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc/example_dataproc_workflow_deferrable.py (95%) rename {tests/system/providers/google/cloud/cloud_build/resources => providers/tests/system/google/cloud/dataproc_metastore}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc_metastore/example_dataproc_metastore.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py (97%) rename {tests/system/providers/google/cloud/cloud_functions => providers/tests/system/google/cloud/datastore}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/datastore/example_datastore_commit.py (96%) rename {tests/system/providers => providers/tests/system}/google/cloud/datastore/example_datastore_query.py (93%) rename {tests/system/providers => providers/tests/system}/google/cloud/datastore/example_datastore_rollback.py (91%) rename {tests/system/providers/google/cloud/cloud_memorystore => providers/tests/system/google/cloud/gcs}/__init__.py (100%) rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_calendar_to_gcs.py (96%) rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_firestore.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_gcs_acl.py (94%) rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_gcs_copy_delete.py (95%) rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_gcs_sensor.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_gcs_to_bigquery.py (93%) rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_gcs_to_bigquery_async.py (96%) rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_gcs_to_gcs.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_gcs_to_gdrive.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_gcs_to_sheets.py (97%) rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_gcs_transform.py (93%) rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_gcs_transform_timespan.py 
(95%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_gcs_upload_download.py (94%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_gdrive_to_gcs.py (96%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_mssql_to_gcs.py (93%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_mysql_to_gcs.py (98%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_oracle_to_gcs.py (92%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_s3_to_gcs.py (95%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_sftp_to_gcs.py (95%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_sheets.py (97%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_sheets_to_gcs.py (97%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/example_trino_to_gcs.py (98%)
 rename {tests/system/providers/google/cloud/cloud_run => providers/tests/system/google/cloud/gcs/resources}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/resources/example_upload.txt (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/resources/tmp.tar.gz (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/resources/transform_script.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/resources/transform_timespan.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/gcs/resources/us-states.csv (100%)
 rename {tests/system/providers/google/cloud/cloud_sql => providers/tests/system/google/cloud/kubernetes_engine}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/kubernetes_engine/example_kubernetes_engine.py (95%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py (95%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py (97%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py (97%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py (96%)
 rename {tests/system/providers/google/cloud/composer => providers/tests/system/google/cloud/life_sciences}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/life_sciences/example_life_sciences.py (95%)
 rename {tests/system/providers/google/cloud/compute => providers/tests/system/google/cloud/life_sciences/resources}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/life_sciences/resources/file (100%)
 rename {tests/system/providers/google/cloud/data_loss_prevention => providers/tests/system/google/cloud/ml_engine}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/ml_engine/example_mlengine.py (98%)
 rename {tests/system/providers/google/cloud/data_loss_prevention/resources => providers/tests/system/google/cloud/natural_language}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/natural_language/example_natural_language.py (97%)
 rename {tests/system/providers/google/cloud/dataflow => providers/tests/system/google/cloud/pubsub}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/pubsub/example_pubsub.py (97%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/pubsub/example_pubsub_deferrable.py (96%)
 rename {tests/system/providers/google/cloud/dataflow/resources => providers/tests/system/google/cloud/spanner}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/spanner/example_spanner.py (96%)
 rename {tests/system/providers/google/cloud/dataform => providers/tests/system/google/cloud/speech_to_text}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/speech_to_text/example_speech_to_text.py (94%)
 rename {tests/system/providers/google/cloud/datafusion => providers/tests/system/google/cloud/sql_to_sheets}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/sql_to_sheets/example_sql_to_sheets.py (98%)
 rename {tests/system/providers/google/cloud/datapipelines => providers/tests/system/google/cloud/stackdriver}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/stackdriver/example_stackdriver.py (97%)
 rename {tests/system/providers/google/cloud/dataplex => providers/tests/system/google/cloud/storage_transfer}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py (98%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py (98%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py (96%)
 rename {tests/system/providers/google/cloud/dataprep => providers/tests/system/google/cloud/storage_transfer/resources}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/storage_transfer/resources/transfer_service_gcp_file (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/storage_transfer/resources/transfer_service_gcs_to_gcs_file (100%)
 rename {tests/system/providers/google/cloud/dataproc => providers/tests/system/google/cloud/tasks}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/tasks/example_queue.py (97%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/tasks/example_tasks.py (97%)
 rename {tests/system/providers/google/cloud/dataproc_metastore => providers/tests/system/google/cloud/text_to_speech}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/text_to_speech/example_text_to_speech.py (93%)
 rename {tests/system/providers/google/cloud/datastore => providers/tests/system/google/cloud/transfers}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/transfers/example_gcs_to_sftp.py (96%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/transfers/example_gdrive_to_local.py (96%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/transfers/example_postgres_to_gcs.py (98%)
 rename {tests/system/providers/google/cloud/gcs => providers/tests/system/google/cloud/transfers/resources}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/transfers/resources/empty.txt (100%)
 rename {tests/system/providers/google/cloud/gcs/resources => providers/tests/system/google/cloud/translate}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/translate/example_translate.py (94%)
 rename {tests/system/providers/google/cloud/kubernetes_engine => providers/tests/system/google/cloud/translate_speech}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/translate_speech/example_translate_speech.py (96%)
 rename {tests/system/providers/google/cloud/life_sciences => providers/tests/system/google/cloud/vertex_ai}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py (97%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py (97%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py (93%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py (97%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py (97%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py (98%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/vertex_ai/example_vertex_ai_custom_container.py (98%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/vertex_ai/example_vertex_ai_custom_job.py (98%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py (98%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/vertex_ai/example_vertex_ai_dataset.py (98%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/vertex_ai/example_vertex_ai_endpoint.py (98%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/vertex_ai/example_vertex_ai_generative_model.py (98%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py (94%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py (98%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py (93%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/vertex_ai/example_vertex_ai_model_service.py (98%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py (97%)
 rename {tests/system/providers/google/cloud/life_sciences/resources => providers/tests/system/google/cloud/video_intelligence}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/video_intelligence/example_video_intelligence.py (97%)
 rename {tests/system/providers/google/cloud/ml_engine => providers/tests/system/google/cloud/vision}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/vision/example_vision_annotate_image.py (97%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/vision/example_vision_autogenerated.py (98%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/vision/example_vision_explicit.py (98%)
 rename {tests/system/providers/google/cloud/natural_language => providers/tests/system/google/cloud/workflows}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/cloud/workflows/example_workflows.py (98%)
 rename {tests/system/providers => providers/tests/system}/google/conftest.py (100%)
 rename {tests/system/providers/google/cloud/pubsub => providers/tests/system/google/datacatalog}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/datacatalog/example_datacatalog_entries.py (97%)
 rename {tests/system/providers => providers/tests/system}/google/datacatalog/example_datacatalog_search_catalog.py (97%)
 rename {tests/system/providers => providers/tests/system}/google/datacatalog/example_datacatalog_tag_templates.py (97%)
 rename {tests/system/providers => providers/tests/system}/google/datacatalog/example_datacatalog_tags.py (97%)
 rename {tests/system/providers/google/cloud/spanner => providers/tests/system/google/firebase}/__init__.py (100%)
 rename {tests/providers/telegram/operators => providers/tests/system/google/leveldb}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/leveldb/example_leveldb.py (94%)
 rename {tests/providers/teradata/hooks => providers/tests/system/google/marketing_platform}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/marketing_platform/example_analytics_admin.py (98%)
 rename {tests/system/providers => providers/tests/system}/google/marketing_platform/example_campaign_manager.py (98%)
 rename {tests/system/providers => providers/tests/system}/google/marketing_platform/example_search_ads.py (97%)
 rename {tests/system/providers/google/cloud/speech_to_text => providers/tests/system/google/suite}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/suite/example_local_to_drive.py (97%)
 rename {tests/system/providers/google/cloud/sql_to_sheets => providers/tests/system/google/suite/resources}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/google/suite/resources/test1 (100%)
 rename {tests/system/providers => providers/tests/system}/google/suite/resources/test2 (100%)
 rename {tests/system/providers/google/cloud/stackdriver => providers/tests/system/google/workplace}/__init__.py (100%)
 rename {tests/system/providers/google/cloud/storage_transfer => providers/tests/system/http}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/http/example_http.py (98%)
 rename {tests/system/providers/google/cloud/storage_transfer/resources => providers/tests/system/influxdb}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/influxdb/example_influxdb.py (94%)
 rename {tests/system/providers => providers/tests/system}/influxdb/example_influxdb_query.py (95%)
 rename {tests/system/providers/google/cloud/tasks => providers/tests/system/jdbc}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/jdbc/example_jdbc_queries.py (94%)
 rename {tests/system/providers/google/cloud/text_to_speech => providers/tests/system/jenkins}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/jenkins/example_jenkins_job_trigger.py (97%)
 rename {tests/system/providers/google/cloud/transfers => providers/tests/system/microsoft}/__init__.py (100%)
 rename {tests/providers/teradata/operators => providers/tests/system/microsoft/azure}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/microsoft/azure/example_adf_run_pipeline.py (96%)
 rename {tests/system/providers => providers/tests/system}/microsoft/azure/example_adls_create.py (93%)
 rename {tests/system/providers => providers/tests/system}/microsoft/azure/example_adls_delete.py (93%)
 rename {tests/system/providers => providers/tests/system}/microsoft/azure/example_adls_list.py (92%)
 rename {tests/system/providers => providers/tests/system}/microsoft/azure/example_azure_batch_operator.py (96%)
 rename {tests/system/providers => providers/tests/system}/microsoft/azure/example_azure_container_instances.py (97%)
 rename {tests/system/providers => providers/tests/system}/microsoft/azure/example_azure_cosmosdb.py (94%)
 rename {tests/system/providers => providers/tests/system}/microsoft/azure/example_azure_service_bus.py (98%)
 rename {tests/system/providers => providers/tests/system}/microsoft/azure/example_azure_synapse.py (97%)
 rename {tests/system/providers => providers/tests/system}/microsoft/azure/example_fileshare.py (93%)
 rename {tests/system/providers => providers/tests/system}/microsoft/azure/example_local_to_adls.py (93%)
 rename {tests/system/providers => providers/tests/system}/microsoft/azure/example_local_to_wasb.py (94%)
 rename {tests/system/providers => providers/tests/system}/microsoft/azure/example_msfabric.py (94%)
 rename {tests/system/providers => providers/tests/system}/microsoft/azure/example_msgraph.py (93%)
 rename {tests/system/providers => providers/tests/system}/microsoft/azure/example_powerbi.py (96%)
 rename {tests/system/providers => providers/tests/system}/microsoft/azure/example_powerbi_dataset_refresh.py (95%)
 rename {tests/system/providers => providers/tests/system}/microsoft/azure/example_s3_to_wasb.py (94%)
 rename {tests/system/providers => providers/tests/system}/microsoft/azure/example_sftp_to_wasb.py (95%)
 rename {tests/system/providers => providers/tests/system}/microsoft/azure/example_synapse_run_pipeline.py (93%)
 rename {tests/system/providers => providers/tests/system}/microsoft/azure/example_wasb_sensors.py (96%)
 rename {tests/system/providers/google/cloud/transfers/resources => providers/tests/system/microsoft/mssql}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/microsoft/mssql/create_table.sql (100%)
 rename {tests/system/providers => providers/tests/system}/microsoft/mssql/example_mssql.py (97%)
 rename {tests/system/providers/google/cloud/translate => providers/tests/system/microsoft/winrm}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/microsoft/winrm/example_winrm.py (95%)
 rename {tests/system/providers/google/cloud/translate_speech => providers/tests/system/mysql}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/mysql/example_mysql.py (95%)
 rename {tests/system/providers/google/cloud/vertex_ai => providers/tests/system/neo4j}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/neo4j/example_neo4j.py (95%)
 rename {tests/system/providers/google/cloud/video_intelligence => providers/tests/system/openai}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/openai/example_openai.py (97%)
 rename {tests/system/providers => providers/tests/system}/openai/example_trigger_batch_operator.py (97%)
 rename {tests/system/providers/google/cloud/vision => providers/tests/system/opensearch}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/opensearch/example_opensearch.py (96%)
 rename {tests/system/providers/google/cloud/workflows => providers/tests/system/opsgenie}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/opsgenie/example_opsgenie_alert.py (96%)
 rename {tests/system/providers => providers/tests/system}/opsgenie/example_opsgenie_notifier.py (96%)
 rename {tests/providers/teradata/transfers => providers/tests/system/papermill}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/papermill/conftest.py (100%)
 rename {tests/system/providers => providers/tests/system}/papermill/example_papermill.py (96%)
 rename {tests/system/providers => providers/tests/system}/papermill/example_papermill_remote_verify.py (97%)
 rename {tests/system/providers => providers/tests/system}/papermill/example_papermill_verify.py (97%)
 rename {tests/system/providers => providers/tests/system}/papermill/input_notebook.ipynb (100%)
 rename {tests/system/providers/google/datacatalog => providers/tests/system/pgvector}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/pgvector/example_pgvector.py (97%)
 rename {tests/system/providers => providers/tests/system}/pgvector/example_pgvector_openai.py (97%)
 rename {tests/system/providers/google/firebase => providers/tests/system/pinecone}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/pinecone/example_create_pod_index.py (96%)
 rename {tests/system/providers => providers/tests/system}/pinecone/example_create_serverless_index.py (96%)
 rename {tests/system/providers => providers/tests/system}/pinecone/example_dag_pinecone.py (95%)
 rename {tests/system/providers => providers/tests/system}/pinecone/example_pinecone_cohere.py (97%)
 rename {tests/system/providers => providers/tests/system}/pinecone/example_pinecone_openai.py (98%)
 rename {tests/system/providers/google/suite => providers/tests/system/postgres}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/postgres/example_postgres.py (96%)
 rename {tests/system/providers/google/suite/resources => providers/tests/system/presto}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/presto/example_gcs_to_presto.py (95%)
 rename {tests/system/providers/google/workplace => providers/tests/system/qdrant}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/qdrant/example_dag_qdrant.py (96%)
 rename {tests/system/providers/http => providers/tests/system/redis}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/redis/example_redis_publish.py (95%)
 rename {tests/system/providers/influxdb => providers/tests/system/salesforce}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/salesforce/example_bulk.py (97%)
 rename {tests/system/providers => providers/tests/system}/salesforce/example_salesforce_apex_rest.py (95%)
 rename {tests/system/providers/jdbc => providers/tests/system/samba}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/samba/example_gcs_to_samba.py (96%)
 rename {tests/system/providers/jenkins => providers/tests/system/sftp}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/sftp/example_sftp_sensor.py (96%)
 rename {tests/system/providers/microsoft/mssql => providers/tests/system/singularity}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/singularity/example_singularity.py (95%)
 rename {tests/system/providers/microsoft/winrm => providers/tests/system/slack}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/slack/example_slack.py (97%)
 rename {tests/system/providers => providers/tests/system}/slack/example_slack_webhook.py (97%)
 rename {tests/system/providers => providers/tests/system}/slack/example_sql_to_slack.py (96%)
 rename {tests/system/providers => providers/tests/system}/slack/example_sql_to_slack_webhook.py (95%)
 rename {tests/system/providers/mysql => providers/tests/system/snowflake}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/snowflake/example_copy_into_snowflake.py (96%)
 rename {tests/system/providers => providers/tests/system}/snowflake/example_snowflake.py (97%)
 rename {tests/system/providers => providers/tests/system}/snowflake/example_snowflake_snowflake_op_template_file.sql (100%)
 rename {tests/system/providers => providers/tests/system}/snowflake/example_snowpark_decorator.py (97%)
 rename {tests/system/providers => providers/tests/system}/snowflake/example_snowpark_operator.py (97%)
 rename {tests/system/providers/neo4j => providers/tests/system/sqlite}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/sqlite/create_table.sql (100%)
 rename {tests/system/providers => providers/tests/system}/sqlite/example_sqlite.py (96%)
 rename {tests/system/providers/openai => providers/tests/system/tableau}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/tableau/example_tableau.py (97%)
 rename {tests/system/providers/opensearch => providers/tests/system/telegram}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/telegram/example_telegram.py (95%)
 rename {tests/system/providers/opsgenie => providers/tests/system/teradata}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/teradata/create_ssl_table.sql (100%)
 rename {tests/system/providers => providers/tests/system}/teradata/create_table.sql (100%)
 rename {tests/system/providers => providers/tests/system}/teradata/example_azure_blob_to_teradata_transfer.py (98%)
 rename {tests/system/providers => providers/tests/system}/teradata/example_s3_to_teradata_transfer.py (98%)
 rename {tests/system/providers => providers/tests/system}/teradata/example_ssl_teradata.py (97%)
 rename {tests/system/providers => providers/tests/system}/teradata/example_teradata.py (97%)
 rename {tests/system/providers => providers/tests/system}/teradata/example_teradata_call_sp.py (97%)
 rename {tests/system/providers => providers/tests/system}/teradata/example_teradata_compute_cluster.py (97%)
 rename {tests/system/providers => providers/tests/system}/teradata/example_teradata_to_teradata_transfer.py (97%)
 rename {tests/system/providers/pgvector => providers/tests/system/trino}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/trino/example_gcs_to_trino.py (95%)
 rename {tests/system/providers => providers/tests/system}/trino/example_trino.py (97%)
 rename {tests/system/providers/pinecone => providers/tests/system/weaviate}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/weaviate/example_weaviate_cohere.py (98%)
 rename {tests/system/providers => providers/tests/system}/weaviate/example_weaviate_dynamic_mapping_dag.py (97%)
 rename {tests/system/providers => providers/tests/system}/weaviate/example_weaviate_openai.py (98%)
 rename {tests/system/providers => providers/tests/system}/weaviate/example_weaviate_operator.py (99%)
 rename {tests/system/providers => providers/tests/system}/weaviate/example_weaviate_using_hook.py (98%)
 rename {tests/system/providers => providers/tests/system}/weaviate/example_weaviate_vectorizer_dag.py (97%)
 rename {tests/system/providers => providers/tests/system}/weaviate/example_weaviate_without_vectorizer_dag.py (98%)
 rename {tests/system/providers => providers/tests/system}/weaviate/jeopardy_data_with_vectors.json (100%)
 rename {tests/system/providers => providers/tests/system}/weaviate/jeopardy_data_without_vectors.json (100%)
 rename {tests/system/providers => providers/tests/system}/weaviate/jeopardy_doc_data_without_vectors.json (100%)
 rename {tests/system/providers/postgres => providers/tests/system/yandex}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/yandex/example_yandexcloud.py (97%)
 rename {tests/system/providers => providers/tests/system}/yandex/example_yandexcloud_dataproc.py (96%)
 rename {tests/system/providers => providers/tests/system}/yandex/example_yandexcloud_dataproc_lightweight.py (92%)
 rename {tests/system/providers => providers/tests/system}/yandex/example_yandexcloud_yq.py (88%)
 rename {tests/system/providers/presto => providers/tests/system/ydb}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/ydb/example_ydb.py (97%)
 rename {tests/system/providers/qdrant => providers/tests/system/zendesk}/__init__.py (100%)
 rename {tests/system/providers => providers/tests/system}/zendesk/example_zendesk_custom_get.py (95%)
 rename {tests/system/providers/redis => providers/tests/tableau}/__init__.py (100%)
 rename {tests/providers/teradata/triggers => providers/tests/tableau/hooks}/__init__.py (100%)
 rename {tests/providers => providers/tests}/tableau/hooks/test_tableau.py (95%)
 rename {tests/system/providers/salesforce => providers/tests/tableau/operators}/__init__.py (100%)
 rename {tests/providers => providers/tests}/tableau/operators/test_tableau.py (100%)
 rename {tests/system/providers/samba => providers/tests/tableau/sensors}/__init__.py (100%)
 rename {tests/providers => providers/tests}/tableau/sensors/test_tableau.py (100%)
 rename {tests/providers/teradata/utils => providers/tests/telegram}/__init__.py (100%)
 rename {tests/providers/trino => providers/tests/telegram/hooks}/__init__.py (100%)
 rename {tests/providers => providers/tests}/telegram/hooks/test_telegram.py (100%)
 rename {tests/providers/trino/hooks => providers/tests/telegram/operators}/__init__.py (100%)
 rename {tests/providers => providers/tests}/telegram/operators/test_telegram.py (100%)
 rename {tests/system/providers/sftp => providers/tests/teradata}/__init__.py (100%)
 rename {tests/providers/trino/transfers => providers/tests/teradata/hooks}/__init__.py (100%)
 rename {tests/providers => providers/tests}/teradata/hooks/test_teradata.py (98%)
 rename {tests/providers/vertica => providers/tests/teradata/operators}/__init__.py (100%)
 rename {tests/providers => providers/tests}/teradata/operators/test_teradata.py (100%)
 rename {tests/providers => providers/tests}/teradata/operators/test_teradata_compute_cluster.py (100%)
 rename {tests/providers/vertica/hooks => providers/tests/teradata/transfers}/__init__.py (100%)
 rename {tests/providers => providers/tests}/teradata/transfers/test_azure_blob_to_teradata.py (100%)
 rename {tests/providers => providers/tests}/teradata/transfers/test_s3_to_teradata.py (100%)
 rename {tests/providers => providers/tests}/teradata/transfers/test_teradata_to_teradata.py (100%)
 rename {tests/providers/vertica/operators => providers/tests/teradata/triggers}/__init__.py (100%)
 rename {tests/providers => providers/tests}/teradata/triggers/test_teradata_compute_cluster.py (100%)
 rename {tests/system/providers => providers/tests/teradata/utils}/__init__.py (100%)
 rename {tests/providers => providers/tests}/teradata/utils/test_constants.py (100%)
 rename {tests/system/providers/amazon => providers/tests/trino}/__init__.py (100%)
 rename {tests/system/providers/singularity => providers/tests/trino/assets}/__init__.py (100%)
 rename {tests/providers => providers/tests}/trino/assets/test_trino.py (100%)
 rename {tests/system/providers/amazon/aws => providers/tests/trino/hooks}/__init__.py (100%)
 rename {tests/providers => providers/tests}/trino/hooks/test_trino.py (100%)
 rename {tests/system/providers/slack => providers/tests/trino/operators}/__init__.py (100%)
 rename {tests/providers => providers/tests}/trino/operators/test_trino.py (100%)
 rename {tests/system/providers/apache => providers/tests/trino/transfers}/__init__.py (100%)
 rename {tests/providers => providers/tests}/trino/transfers/test_gcs_to_trino.py (100%)
 rename {tests/system/providers/apache/beam => providers/tests/vertica}/__init__.py (100%)
 rename {tests/system/providers/apache/hive => providers/tests/vertica/hooks}/__init__.py (100%)
 rename {tests/providers => providers/tests}/vertica/hooks/test_vertica.py (100%)
 rename {tests/system/providers/apache/iceberg => providers/tests/vertica/operators}/__init__.py (100%)
 rename {tests/providers => providers/tests}/vertica/operators/test_vertica.py (100%)
 rename {tests/system/providers/snowflake => providers/tests/weaviate}/__init__.py (100%)
 rename {tests/system/providers/sqlite => providers/tests/weaviate/hooks}/__init__.py (100%)
 rename {tests/providers => providers/tests}/weaviate/hooks/test_weaviate.py (100%)
 rename {tests/system/providers/tableau => providers/tests/weaviate/operators}/__init__.py (100%)
 rename {tests/providers => providers/tests}/weaviate/operators/test_weaviate.py (100%)
 rename {tests/system/providers/telegram => providers/tests/yandex}/__init__.py (100%)
 rename {tests/system/providers/teradata => providers/tests/yandex/hooks}/__init__.py (100%)
 rename {tests/providers => providers/tests}/yandex/hooks/test_dataproc.py (100%)
 rename {tests/providers => providers/tests}/yandex/hooks/test_yandex.py (91%)
 rename {tests/providers => providers/tests}/yandex/hooks/test_yq.py (100%)
 rename {tests/system/providers/trino => providers/tests/yandex/links}/__init__.py (100%)
 rename {tests/providers => providers/tests}/yandex/links/test_yq.py (94%)
 rename {tests/system/providers/weaviate => providers/tests/yandex/operators}/__init__.py (100%)
 rename {tests/providers => providers/tests}/yandex/operators/test_dataproc.py (100%)
 rename {tests/providers => providers/tests}/yandex/operators/test_yq.py (98%)
 rename {tests/system/providers/yandex => providers/tests/yandex/secrets}/__init__.py (100%)
 rename {tests/providers => providers/tests}/yandex/secrets/test_lockbox.py (100%)
 rename {tests/system/providers/ydb => providers/tests/yandex/utils}/__init__.py (100%)
 rename {tests/providers => providers/tests}/yandex/utils/test_credentials.py (100%)
 rename {tests/providers => providers/tests}/yandex/utils/test_defaults.py (100%)
 rename {tests/providers => providers/tests}/yandex/utils/test_fields.py (100%)
 rename {tests/providers => providers/tests}/yandex/utils/test_user_agent.py (100%)
 rename {tests/system/providers/zendesk => providers/tests/ydb}/__init__.py (100%)
 rename {tests/test_utils/operators => providers/tests/ydb/hooks}/__init__.py (100%)
 rename {tests/providers => providers/tests}/ydb/hooks/test_ydb.py (100%)
 rename {tests/test_utils/perf => providers/tests/ydb/operators}/__init__.py (100%)
 rename {tests/providers => providers/tests}/ydb/operators/test_ydb.py (100%)
 rename tests/providers/ydb/utils/test_defaults.py => providers/tests/ydb/utils/__init__.py (100%)
 rename {tests/providers => providers/tests}/ydb/utils/test_credentials.py (100%)
 rename tests/system/providers/google/leveldb/__init__.py => providers/tests/ydb/utils/test_defaults.py (99%)
 rename {tests/system/providers/microsoft => providers/tests/zendesk}/__init__.py (99%)
 rename {tests/system/providers/dbt => providers/tests/zendesk/hooks}/__init__.py (99%)
 rename {tests/providers => providers/tests}/zendesk/hooks/test_zendesk.py (100%)
 delete mode 100644 tests/system/providers/microsoft/azure/__init__.py
 delete mode 100644 tests/system/providers/papermill/__init__.py

diff --git a/.dockerignore b/.dockerignore
index 31ef8bb9ac260..bdbf5fb0883af 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -34,6 +34,7 @@
 !chart
 !docs
 !licenses
+!providers/
 
 # Add those folders to the context so that they are available in the CI container
 !scripts
diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml
index be62d541f0dea..019ea900f8e12 100644
--- a/.github/boring-cyborg.yml
+++ b/.github/boring-cyborg.yml
@@ -19,532 +19,532 @@ labelPRBasedOnFilePath:
   provider:airbyte:
-    - airflow/providers/airbyte/**/*
+    - providers/src/airflow/providers/airbyte/**/*
     - docs/apache-airflow-providers-airbyte/**/*
-    - tests/providers/airbyte/**/*
-    - tests/system/providers/airbyte/**/*
+    - providers/tests/airbyte/**/*
+    - providers/tests/system/airbyte/**/*
 
   provider:alibaba:
-    - airflow/providers/alibaba/**/*
+    - providers/src/airflow/providers/alibaba/**/*
     - docs/apache-airflow-providers-alibaba/**/*
-    - tests/providers/alibaba/**/*
-    - tests/system/providers/alibaba/**/*
+    - providers/tests/alibaba/**/*
+    - providers/tests/system/alibaba/**/*
 
   provider:amazon-aws:
-    - airflow/providers/amazon/aws/**/*
-    - tests/providers/amazon/aws/**/*
+    - providers/src/airflow/providers/amazon/aws/**/*
+    - providers/tests/amazon/aws/**/*
     - docs/apache-airflow-providers-amazon/**/*
-    - tests/system/providers/amazon/aws/**/*
+    - providers/tests/system/amazon/aws/**/*
 
   provider:apache-beam:
-    - airflow/providers/apache/beam/**/*
+    - providers/src/airflow/providers/apache/beam/**/*
     - docs/apache-airflow-providers-apache-beam/**/*
-    - tests/providers/apache/beam/**/*
-    - tests/system/providers/apache/beam/**/*
+    - providers/tests/apache/beam/**/*
+    - providers/tests/system/apache/beam/**/*
 
   provider:apache-cassandra:
-    - airflow/providers/apache/cassandra/**/*
+    - providers/src/airflow/providers/apache/cassandra/**/*
     - docs/apache-airflow-providers-apache-cassandra/**/*
-    - tests/providers/apache/cassandra/**/*
-    - tests/system/providers/apache/cassandra/**/*
+    - providers/tests/apache/cassandra/**/*
+    - providers/tests/system/apache/cassandra/**/*
 
   provider:apache-drill:
-    - airflow/providers/apache/drill/**/*
+    - providers/src/airflow/providers/apache/drill/**/*
    - docs/apache-airflow-providers-apache-drill/**/*
-    - tests/providers/apache/drill/**/*
-    - tests/system/providers/apache/drill/**/*
+    - providers/tests/apache/drill/**/*
+    - providers/tests/system/apache/drill/**/*
 
   provider:apache-druid:
-    - airflow/providers/apache/druid/**/*
+    - providers/src/airflow/providers/apache/druid/**/*
     - docs/apache-airflow-providers-apache-druid/**/*
-    - tests/providers/apache/druid/**/*
-    - tests/system/providers/apache/druid/**/*
+    - providers/tests/apache/druid/**/*
+    - providers/tests/system/apache/druid/**/*
 
   provider:apache-flink:
-    - airflow/providers/apache/flink/**/*
+    - providers/src/airflow/providers/apache/flink/**/*
     - docs/apache-airflow-providers-apache-flink/**/*
-    - tests/providers/apache/flink/**/*
+    - providers/tests/apache/flink/**/*
 
   provider:apache-hdfs:
-    - airflow/providers/apache/hdfs/**/*
+    - providers/src/airflow/providers/apache/hdfs/**/*
     - docs/apache-airflow-providers-apache-hdfs/**/*
-    - tests/providers/apache/hdfs/**/*
+    - providers/tests/apache/hdfs/**/*
 
   provider:apache-hive:
-    - airflow/providers/apache/hive/**/*
+    - providers/src/airflow/providers/apache/hive/**/*
     - docs/apache-airflow-providers-apache-hive/**/*
-    - tests/providers/apache/hive/**/*
-    - tests/system/providers/apache/hive/**/*
+    - providers/tests/apache/hive/**/*
+    - providers/tests/system/apache/hive/**/*
 
   provider:apache-iceberg:
-    - airflow/providers/apache/iceberg/**/*
+    - providers/src/airflow/providers/apache/iceberg/**/*
     - docs/apache-airflow-providers-apache-iceberg/**/*
-    - tests/providers/apache/iceberg/**/*
-    - tests/system/providers/apache/iceberg/**/*
+    - providers/tests/apache/iceberg/**/*
+    - providers/tests/system/apache/iceberg/**/*
 
   provider:apache-impala:
-    - airflow/providers/apache/impala/**/*
+    - providers/src/airflow/providers/apache/impala/**/*
     - docs/apache-airflow-providers-apache-impala/**/*
-    - tests/providers/apache/impala/**/*
+    - providers/tests/apache/impala/**/*
 
   provider:apache-kafka:
-    - airflow/providers/apache/kafka/**/*
+    - providers/src/airflow/providers/apache/kafka/**/*
     - docs/apache-airflow-providers-apache-kafka/**/*
-    - tests/providers/apache/kafka/**/*
-    - tests/system/providers/apache/kafka/**/*
+    - providers/tests/apache/kafka/**/*
+    - providers/tests/system/apache/kafka/**/*
 
   provider:apache-kylin:
-    - airflow/providers/apache/kylin/**/*
+    - providers/src/airflow/providers/apache/kylin/**/*
     - docs/apache-airflow-providers-apache-kylin/**/*
-    - tests/providers/apache/kylin/**/*
-    - tests/system/providers/apache/kylin/**/*
+    - providers/tests/apache/kylin/**/*
+    - providers/tests/system/apache/kylin/**/*
 
   provider:apache-livy:
-    - airflow/providers/apache/livy/**/*
+    - providers/src/airflow/providers/apache/livy/**/*
     - docs/apache-airflow-providers-apache-livy/**/*
-    - tests/providers/apache/livy/**/*
-    - tests/system/providers/apache/livy/**/*
+    - providers/tests/apache/livy/**/*
+    - providers/tests/system/apache/livy/**/*
 
   provider:apache-pig:
-    - airflow/providers/apache/pig/**/*
+    - providers/src/airflow/providers/apache/pig/**/*
     - docs/apache-airflow-providers-apache-pig/**/*
-    - tests/providers/apache/pig/**/*
-    - tests/system/providers/apache/pig/**/*
+    - providers/tests/apache/pig/**/*
+    - providers/tests/system/apache/pig/**/*
 
   provider:apache-pinot:
-    - airflow/providers/apache/pinot/**/*
+    - providers/src/airflow/providers/apache/pinot/**/*
     - docs/apache-airflow-providers-apache-pinot/**/*
-    - tests/providers/apache/pinot/**/*
-    - tests/system/providers/apache/pinot/**/*
+    - providers/tests/apache/pinot/**/*
+    - providers/tests/system/apache/pinot/**/*
 
   provider:apache-spark:
-    - airflow/providers/apache/spark/**/*
+    - providers/src/airflow/providers/apache/spark/**/*
     - docs/apache-airflow-providers-apache-spark/**/*
-    - tests/providers/apache/spark/**/*
-    - tests/system/providers/apache/spark/**/*
+    - providers/tests/apache/spark/**/*
+    - providers/tests/system/apache/spark/**/*
 
   provider:apprise:
-    - airflow/providers/apprise/**/*
+    - providers/src/airflow/providers/apprise/**/*
     - docs/apache-airflow-providers-apprise/**/*
-    - tests/providers/apprise/**/*
+    - providers/tests/apprise/**/*
 
   provider:arangodb:
-    - airflow/providers/arangodb/**/*
+    - providers/src/airflow/providers/arangodb/**/*
     - docs/apache-airflow-providers-arangodb/**/*
-    - tests/providers/arangodb/**/*
+    - providers/tests/arangodb/**/*
 
   provider:asana:
-    - airflow/providers/asana/**/*
+    - providers/src/airflow/providers/asana/**/*
     - docs/apache-airflow-providers-asana/**/*
-    - tests/providers/asana/**/*
-    - tests/system/providers/asana/**/*
+    - providers/tests/asana/**/*
+    - providers/tests/system/asana/**/*
 
   provider:atlassian-jira:
-    - airflow/providers/atlassian/jira/**/*
+    - providers/src/airflow/providers/atlassian/jira/**/*
     - docs/apache-airflow-providers-atlassian-jira/**/*
-    - tests/providers/atlassian/jira/**/*
+    - providers/tests/atlassian/jira/**/*
 
   provider:celery:
-    - airflow/providers/celery/**/*
+    - providers/src/airflow/providers/celery/**/*
     - docs/apache-airflow-providers-celery/**/*
-    - tests/providers/celery/**/*
+    - providers/tests/celery/**/*
 
   provider:cloudant:
-    - airflow/providers/cloudant/**/*
+    - providers/src/airflow/providers/cloudant/**/*
     - docs/apache-airflow-providers-cloudant/**/*
-    - tests/providers/cloudant/**/*
+    - providers/tests/cloudant/**/*
 
   provider:cncf-kubernetes:
     - airflow/**/kubernetes_*.py
     - airflow/example_dags/example_kubernetes_executor.py
-    - airflow/providers/cncf/kubernetes/**/*
-    - airflow/providers/celery/executors/celery_kubernetes_executor.py
+    - providers/src/airflow/providers/cncf/kubernetes/**/*
+    - providers/src/airflow/providers/celery/executors/celery_kubernetes_executor.py
     - docs/apache-airflow-providers-cncf-kubernetes/**/*
     - kubernetes_tests/**/*
-    - tests/providers/cncf/kubernetes/**/*
-    - tests/system/providers/cncf/kubernetes/**/*
+    - providers/tests/cncf/kubernetes/**/*
+    - providers/tests/system/cncf/kubernetes/**/*
 
   provider:cohere:
-    - airflow/providers/cohere/**/*
+    - providers/src/airflow/providers/cohere/**/*
     - docs/apache-airflow-providers-cohere/**/*
-    - tests/providers/cohere/**/*
-    - tests/system/providers/cohere/**/*
+    - providers/tests/cohere/**/*
+    - providers/tests/system/cohere/**/*
 
   provider:common-compat:
-    - airflow/providers/common/compat/**/*
+    - providers/src/airflow/providers/common/compat/**/*
     - docs/apache-airflow-providers-common-compat/**/*
-    - tests/providers/common/compat/**/*
+    - providers/tests/common/compat/**/*
 
   provider:common-io:
-    - airflow/providers/common/io/**/*
+    - providers/src/airflow/providers/common/io/**/*
     - docs/apache-airflow-providers-common-io/**/*
-    - tests/system/providers/common/io/**/*
+    - providers/tests/system/common/io/**/*
 
   provider:common-sql:
-    - airflow/providers/common/sql/**/*
+    - providers/src/airflow/providers/common/sql/**/*
     - docs/apache-airflow-providers-common-sql/**/*
-    - tests/providers/common/sql/**/*
-    - tests/system/providers/common/sql/**/*
+    - providers/tests/common/sql/**/*
+    - providers/tests/system/common/sql/**/*
 
   provider:standard:
-    - airflow/providers/standard/**/*
+    - providers/src/airflow/providers/standard/**/*
     - docs/apache-airflow-providers-standard/**/*
-    - tests/providers/standard/**/*
+    - providers/tests/standard/**/*
 
   provider:databricks:
-    - airflow/providers/databricks/**/*
+    - providers/src/airflow/providers/databricks/**/*
     - docs/apache-airflow-providers-databricks/**/*
-    - tests/providers/databricks/**/*
-    - tests/system/providers/databricks/**/*
+    - providers/tests/databricks/**/*
+    - providers/tests/system/databricks/**/*
 
   provider:datadog:
-    - airflow/providers/datadog/**/*
+    - providers/src/airflow/providers/datadog/**/*
     - docs/apache-airflow-providers-datadog/**/*
-    - tests/providers/datadog/**/*
+    - providers/tests/datadog/**/*
 
   provider:dbt-cloud:
-    - airflow/providers/dbt/cloud/**/*
+    - providers/src/airflow/providers/dbt/cloud/**/*
     - docs/apache-airflow-providers-dbt-cloud/**/*
-    - tests/providers/dbt/cloud/**/*
-    - tests/system/providers/dbt/cloud/**/*
+    - providers/tests/dbt/cloud/**/*
+    - providers/tests/system/dbt/cloud/**/*
 
   provider:dingding:
-    - airflow/providers/dingding/**/*
+    - providers/src/airflow/providers/dingding/**/*
     - docs/apache-airflow-providers-dingding/**/*
-    - tests/providers/dingding/**/*
-    - tests/system/providers/dingding/**/*
+    - providers/tests/dingding/**/*
+    - providers/tests/system/dingding/**/*
 
   provider:discord:
-    - airflow/providers/discord/**/*
+    - providers/src/airflow/providers/discord/**/*
     - docs/apache-airflow-providers-discord/**/*
-    - tests/providers/discord/**/*
+    - providers/tests/discord/**/*
 
   provider:docker:
-    - airflow/providers/docker/**/*
+    - providers/src/airflow/providers/docker/**/*
     - docs/apache-airflow-providers-docker/**/*
-    - tests/providers/docker/**/*
-    - tests/system/providers/docker/**/*
+    - providers/tests/docker/**/*
+    - providers/tests/system/docker/**/*
 
   provider:elasticsearch:
-    - airflow/providers/elasticsearch/**/*
+    - providers/src/airflow/providers/elasticsearch/**/*
     - docs/apache-airflow-providers-elasticsearch/**/*
-    - tests/providers/elasticsearch/**/*
-    - tests/system/providers/elasticsearch/**/*
+    - providers/tests/elasticsearch/**/*
+    - providers/tests/system/elasticsearch/**/*
 
   provider:exasol:
-    - airflow/providers/exasol/**/*
+    - providers/src/airflow/providers/exasol/**/*
     - docs/apache-airflow-providers-exasol/**/*
-    - tests/providers/exasol/**/*
+    - providers/tests/exasol/**/*
 
   provider:fab:
-    - airflow/providers/fab/**/*
+    - providers/src/airflow/providers/fab/**/*
     - docs/apache-airflow-providers-fab/**/*
-    - tests/providers/fab/**/*
+    - providers/tests/fab/**/*
 
   provider:facebook:
-    - airflow/providers/facebook/**/*
+    - providers/src/airflow/providers/facebook/**/*
     - docs/apache-airflow-providers-facebook/**/*
-    - tests/providers/facebook/**/*
+    - providers/tests/facebook/**/*
 
   provider:ftp:
-    - airflow/providers/ftp/**/*
+    - providers/src/airflow/providers/ftp/**/*
     - docs/apache-airflow-providers-ftp/**/*
-    - tests/providers/ftp/**/*
-    - tests/system/providers/ftp/**/*
+    - providers/tests/ftp/**/*
+    - providers/tests/system/ftp/**/*
 
   provider:github:
-    - airflow/providers/github/**/*
+    - providers/src/airflow/providers/github/**/*
     - docs/apache-airflow-providers-github/**/*
-    - tests/providers/github/**/*
-    - tests/system/providers/github/**/*
+    - providers/tests/github/**/*
+    - providers/tests/system/github/**/*
 
   provider:google:
-    - airflow/providers/google/**/*
+    - providers/src/airflow/providers/google/**/*
     - docs/apache-airflow-providers-google/**/*
-    - tests/providers/google/**/*
-    - tests/system/providers/google/**/*
+    - providers/tests/google/**/*
+    - providers/tests/system/google/**/*
 
   provider:grpc:
-    - airflow/providers/grpc/**/*
+    - providers/src/airflow/providers/grpc/**/*
     - docs/apache-airflow-providers-grpc/**/*
-    - tests/providers/grpc/**/*
+    - providers/tests/grpc/**/*
 
   provider:hashicorp:
-    - airflow/providers/hashicorp/**/*
+    - providers/src/airflow/providers/hashicorp/**/*
     - docs/apache-airflow-providers-hashicorp/**/*
-    - tests/providers/hashicorp/**/*
+    - providers/tests/hashicorp/**/*
 
   provider:http:
-    - airflow/providers/http/**/*
+    - providers/src/airflow/providers/http/**/*
     - docs/apache-airflow-providers-http/**/*
-    - tests/providers/http/**/*
-    - tests/system/providers/http/**/*
+    - providers/tests/http/**/*
+    - providers/tests/system/http/**/*
 
   provider:imap:
-    - airflow/providers/imap/**/*
+    - providers/src/airflow/providers/imap/**/*
     - docs/apache-airflow-providers-imap/**/*
-    - tests/providers/imap/**/*
+    - providers/tests/imap/**/*
 
   provider:influxdb:
-    - airflow/providers/influxdb/**/*
+    - providers/src/airflow/providers/influxdb/**/*
     - docs/apache-airflow-providers-influxdb/**/*
-    - tests/providers/influxdb/**/*
-    - tests/system/providers/influxdb/**/*
+    - providers/tests/influxdb/**/*
+    - providers/tests/system/influxdb/**/*
 
   provider:jdbc:
-    - airflow/providers/jdbc/**/*
+    - providers/src/airflow/providers/jdbc/**/*
     - docs/apache-airflow-providers-jdbc/**/*
-    - tests/providers/jdbc/**/*
-    - tests/system/providers/jdbc/**/*
+    - providers/tests/jdbc/**/*
+    - providers/tests/system/jdbc/**/*
 
   provider:jenkins:
-    - airflow/providers/jenkins/**/*
+    - providers/src/airflow/providers/jenkins/**/*
     - docs/apache-airflow-providers-jenkins/**/*
-    - tests/providers/jenkins/**/*
-    - tests/system/providers/jenkins/**/*
+    - providers/tests/jenkins/**/*
+    - providers/tests/system/jenkins/**/*
 
   provider:microsoft-azure:
-    - airflow/providers/microsoft/azure/**/*
-    - tests/providers/microsoft/azure/**/*
+    - providers/src/airflow/providers/microsoft/azure/**/*
+    - providers/tests/microsoft/azure/**/*
     - docs/apache-airflow-providers-microsoft-azure/**/*
-    - tests/system/providers/microsoft/azure/**/*
+    - providers/tests/system/microsoft/azure/**/*
 
   provider:microsoft-mssql:
-    - airflow/providers/microsoft/mssql/**/*
+    - providers/src/airflow/providers/microsoft/mssql/**/*
     - docs/apache-airflow-providers-microsoft-mssql/**/*
-    - tests/providers/microsoft/mssql/**/*
-    - tests/system/providers/microsoft/mssql/**/*
+    - providers/tests/microsoft/mssql/**/*
+    - providers/tests/system/microsoft/mssql/**/*
 
   provider:microsoft-psrp:
-    - airflow/providers/microsoft/psrp/**/*
+    - providers/src/airflow/providers/microsoft/psrp/**/*
     - docs/apache-airflow-providers-microsoft-psrp/**/*
-    - tests/providers/microsoft/psrp/**/*
+    - providers/tests/microsoft/psrp/**/*
 
   provider:microsoft-winrm:
-    - airflow/providers/microsoft/winrm/**/*
+    - providers/src/airflow/providers/microsoft/winrm/**/*
     - docs/apache-airflow-providers-microsoft-winrm/**/*
-    - tests/providers/microsoft/winrm/**/*
-    - tests/system/providers/microsoft/winrm/**/*
+    - providers/tests/microsoft/winrm/**/*
+    - providers/tests/system/microsoft/winrm/**/*
 
   provider:mongo:
-    - airflow/providers/mongo/**/*
+    - providers/src/airflow/providers/mongo/**/*
     - docs/apache-airflow-providers-mongo/**/*
-    - tests/providers/mongo/**/*
+    - providers/tests/mongo/**/*
 
   provider:mysql:
-    - airflow/providers/mysql/**/*
+    - providers/src/airflow/providers/mysql/**/*
     - docs/apache-airflow-providers-mysql/**/*
-    - tests/providers/mysql/**/*
-    - tests/system/providers/mysql/**/*
+    - providers/tests/mysql/**/*
+    - providers/tests/system/mysql/**/*
 
   provider:neo4j:
-    - airflow/providers/neo4j/**/*
+    - providers/src/airflow/providers/neo4j/**/*
     - docs/apache-airflow-providers-neo4j/**/*
-    - tests/providers/neo4j/**/*
-    - tests/system/providers/neo4j/**/*
+    - providers/tests/neo4j/**/*
+    - providers/tests/system/neo4j/**/*
 
   provider:odbc:
-    - airflow/providers/odbc/**/*
+    - providers/src/airflow/providers/odbc/**/*
     - docs/apache-airflow-providers-odbc/**/*
-    - tests/providers/odbc/**/*
+    - providers/tests/odbc/**/*
 
   provider:openai:
-    - airflow/providers/openai/**/*
+    - providers/src/airflow/providers/openai/**/*
     - docs/apache-airflow-providers-openai/**/*
-    - tests/providers/openai/**/*
-    - tests/system/providers/openai/**/*
+    - providers/tests/openai/**/*
+    - providers/tests/system/openai/**/*
 
   provider:openfaas:
-    - airflow/providers/openfaas/**/*
+    - providers/src/airflow/providers/openfaas/**/*
     - docs/apache-airflow-providers-openfaas/**/*
-    - tests/providers/openfaas/**/*
+    - providers/tests/openfaas/**/*
 
   provider:openlineage:
-    - airflow/providers/openlineage/**/*
+    - providers/src/airflow/providers/openlineage/**/*
     - docs/apache-airflow-providers-openlineage/**/*
-    - tests/providers/openlineage/**/*
+    - providers/tests/openlineage/**/*
 
   provider:opensearch:
-    - airflow/providers/opensearch/**/*
+    - providers/src/airflow/providers/opensearch/**/*
     - docs/apache-airflow-providers-opensearch/**/*
-    - tests/providers/opensearch/**/*
-    - tests/system/providers/opensearch/**/*
+    - providers/tests/opensearch/**/*
+    - providers/tests/system/opensearch/**/*
 
   provider:opsgenie:
-    - airflow/providers/opsgenie/**/*
+    - providers/src/airflow/providers/opsgenie/**/*
     - docs/apache-airflow-providers-opsgenie/**/*
-    - tests/providers/opsgenie/**/*
-    - tests/system/providers/opsgenie/**/*
+    - providers/tests/opsgenie/**/*
+    - providers/tests/system/opsgenie/**/*
 
   provider:Oracle:
-    - airflow/providers/oracle/**/*
+    - providers/src/airflow/providers/oracle/**/*
     - docs/apache-airflow-providers-oracle/**/*
-    - tests/providers/oracle/**/*
+    - providers/tests/oracle/**/*
 
   provider:pagerduty:
-    - airflow/providers/pagerduty/**/*
+    - providers/src/airflow/providers/pagerduty/**/*
     - docs/apache-airflow-providers-pagerduty/**/*
-    - tests/providers/pagerduty/**/*
+    - providers/tests/pagerduty/**/*
 
   provider:papermill:
-    - airflow/providers/papermill/**/*
+    - providers/src/airflow/providers/papermill/**/*
     - docs/apache-airflow-providers-papermill/**/*
-    - tests/providers/papermill/**/*
-    - tests/system/providers/papermill/**/*
+    - providers/tests/papermill/**/*
+    - providers/tests/system/papermill/**/*
 
   provider:pgvector:
-    - airflow/providers/pgvector/**/*
+    - providers/src/airflow/providers/pgvector/**/*
     - docs/apache-airflow-providers-pgvector/**/*
-    - tests/providers/pgvector/**/*
-    - tests/system/providers/pgvector/**/*
+    - providers/tests/pgvector/**/*
+    - providers/tests/system/pgvector/**/*
 
   provider:pinecone:
-    - airflow/providers/pinecone/**/*
+    - providers/src/airflow/providers/pinecone/**/*
     - docs/apache-airflow-providers-pinecone/**/*
-    - tests/providers/pinecone/**/*
-    - tests/system/providers/pinecone/**/*
+    - providers/tests/pinecone/**/*
+    - providers/tests/system/pinecone/**/*
 
   provider:postgres:
-    - airflow/providers/postgres/**/*
+    - providers/src/airflow/providers/postgres/**/*
     - docs/apache-airflow-providers-postgres/**/*
-    - tests/providers/postgres/**/*
-    - tests/system/providers/postgres/**/*
+    - providers/tests/postgres/**/*
+    - providers/tests/system/postgres/**/*
 
   provider:presto:
-    - airflow/providers/presto/**/*
+    - providers/src/airflow/providers/presto/**/*
     - docs/apache-airflow-providers-presto/**/*
-    - tests/providers/presto/**/*
-    - tests/system/providers/presto/**/*
+    - providers/tests/presto/**/*
+    - providers/tests/system/presto/**/*
 
   provider:qdrant:
-    - airflow/providers/qdrant/**/*
+    - providers/src/airflow/providers/qdrant/**/*
     - docs/apache-airflow-providers-qdrant/**/*
-    - tests/providers/qdrant/**/*
-    - tests/system/providers/qdrant/**/*
+    - providers/tests/qdrant/**/*
+    - providers/tests/system/qdrant/**/*
 
   provider:redis:
-    - airflow/providers/redis/**/*
+    - providers/src/airflow/providers/redis/**/*
     - docs/apache-airflow-providers-redis/**/*
-    - tests/providers/redis/**/*
+    - providers/tests/redis/**/*
 
   provider:salesforce:
-    - airflow/providers/salesforce/**/*
+    - providers/src/airflow/providers/salesforce/**/*
     - docs/apache-airflow-providers-salesforce/**/*
-    - tests/providers/salesforce/**/*
-    - tests/system/providers/salesforce/**/*
+    - providers/tests/salesforce/**/*
+    - providers/tests/system/salesforce/**/*
 
   provider:samba:
-    - airflow/providers/samba/**/*
+    - providers/src/airflow/providers/samba/**/*
     - docs/apache-airflow-providers-samba/**/*
-    - tests/providers/samba/**/*
+    - providers/tests/samba/**/*
 
   provider:segment:
-    - airflow/providers/segment/**/*
+    - providers/src/airflow/providers/segment/**/*
     - docs/apache-airflow-providers-segment/**/*
-    - tests/providers/segment/**/*
+    - providers/tests/segment/**/*
 
   provider:sendgrid:
-    - airflow/providers/segment/**/*
+    - providers/src/airflow/providers/segment/**/*
     - docs/apache-airflow-providers-segment/**/*
-    - tests/providers/segment/**/*
+    - providers/tests/segment/**/*
 
   provider:sftp:
-    - airflow/providers/sftp/**/*
+    - providers/src/airflow/providers/sftp/**/*
     - docs/apache-airflow-providers-sftp/**/*
-    - tests/providers/sftp/**/*
+    - providers/tests/sftp/**/*
 
   provider:singularity:
-    - airflow/providers/singularity/**/*
+    - providers/src/airflow/providers/singularity/**/*
     - docs/apache-airflow-providers-singularity/**/*
-    - tests/providers/singularity/**/*
-    - tests/system/providers/singularity/**/*
+    - providers/tests/singularity/**/*
+    - providers/tests/system/singularity/**/*
 
   provider:slack:
-    - airflow/providers/slack/**/*
+    - providers/src/airflow/providers/slack/**/*
     - docs/apache-airflow-providers-slack/**/*
-    - tests/providers/slack/**/*
-    - tests/system/providers/slack/**/*
+    - providers/tests/slack/**/*
+    - providers/tests/system/slack/**/*
 
   provider:smtp:
-    - airflow/providers/smtp/**/*
+    - providers/src/airflow/providers/smtp/**/*
     - docs/apache-airflow-providers-smtp/**/*
-    - tests/providers/smtp/**/*
+    - providers/tests/smtp/**/*
 
   provider:snowflake:
-    - airflow/providers/snowflake/**/*
+    - providers/src/airflow/providers/snowflake/**/*
     - docs/apache-airflow-providers-snowflake/**/*
-    - tests/providers/snowflake/**/*
-    - tests/system/providers/snowflake/**/*
+    - providers/tests/snowflake/**/*
+    - providers/tests/system/snowflake/**/*
 
   provider:sqlite:
-    - airflow/providers/sqlite/**/*
+    - providers/src/airflow/providers/sqlite/**/*
     - docs/apache-airflow-providers-sqlite/**/*
-    - tests/providers/sqlite/**/*
-    - tests/system/providers/sqlite/**/*
+    - providers/tests/sqlite/**/*
+    - providers/tests/system/sqlite/**/*
 
   provider:ssh:
-    - airflow/providers/ssh/**/*
+    - providers/src/airflow/providers/ssh/**/*
     - docs/apache-airflow-providers-ssh/**/*
-    - tests/providers/ssh/**/*
+    - providers/tests/ssh/**/*
 
   provider:tableau:
-    - airflow/providers/tableau/**/*
+    - providers/src/airflow/providers/tableau/**/*
     - docs/apache-airflow-providers-tableau/**/*
-    - tests/providers/tableau/**/*
-    - tests/system/providers/tableau/**/*
+    - providers/tests/tableau/**/*
+    - providers/tests/system/tableau/**/*
 
   provider:telegram:
-    - airflow/providers/telegram/**/*
+    - providers/src/airflow/providers/telegram/**/*
     - docs/apache-airflow-providers-telegram/**/*
-    - tests/providers/telegram/**/*
-    - tests/system/providers/telegram/**/*
+    - providers/tests/telegram/**/*
+    - providers/tests/system/telegram/**/*
 
   provider:teradata:
-    - airflow/providers/teradata/**/*
+    - providers/src/airflow/providers/teradata/**/*
     - docs/apache-airflow-providers-teradata/**/*
-    - tests/providers/teradata/**/*
-    - tests/system/providers/teradata/**/*
+    - providers/tests/teradata/**/*
+    - providers/tests/system/teradata/**/*
 
   provider:trino:
-    - airflow/providers/trino/**/*
+    - providers/src/airflow/providers/trino/**/*
     - docs/apache-airflow-providers-trino/**/*
-    - tests/providers/trino/**/*
-    - tests/system/providers/trino/**/*
+    - providers/tests/trino/**/*
+    - providers/tests/system/trino/**/*
 
   provider:vertica:
-    - airflow/providers/vertica/**/*
+    - providers/src/airflow/providers/vertica/**/*
     - docs/apache-airflow-providers-vertica/**/*
-    - tests/providers/vertica/**/*
+    - providers/tests/vertica/**/*
 
   provider:weaviate:
-    - airflow/providers/weaviate/**/*
+    - providers/src/airflow/providers/weaviate/**/*
     - docs/apache-airflow-providers-weaviate/**/*
-    - tests/providers/weaviate/**/*
-    - tests/system/providers/weaviate/**/*
+    - providers/tests/weaviate/**/*
+    - providers/tests/system/weaviate/**/*
 
   provider:yandex:
-    - airflow/providers/yandex/**/*
+    - providers/src/airflow/providers/yandex/**/*
     - docs/apache-airflow-providers-yandex/**/*
-    - tests/providers/yandex/**/*
-    - tests/system/providers/yandex/**/*
+    - providers/tests/yandex/**/*
+    - providers/tests/system/yandex/**/*
 
   provider:ydb:
-    - airflow/providers/ydb/**/*
+    - providers/src/airflow/providers/ydb/**/*
     - docs/apache-airflow-providers-ydb/**/*
-    - tests/providers/ydb/**/*
-    - tests/system/providers/ydb/**/*
+    - providers/tests/ydb/**/*
+    - providers/tests/system/ydb/**/*
 
   provider:zendesk:
-    - airflow/providers/zendesk/**/*
+    - providers/src/airflow/providers/zendesk/**/*
     - docs/apache-airflow-providers-zendesk/**/*
-    - tests/providers/zendesk/**/*
-    - tests/system/providers/zendesk/**/*
+    - providers/tests/zendesk/**/*
+    - providers/tests/system/zendesk/**/*
 
   area:providers:
-    - airflow/providers/**/*
+    - providers/src/airflow/providers/**/*
     - docs/apache-airflow-providers-*/**/*
-    - tests/providers/**/*
-    - tests/system/providers/**/*
+    - providers/tests/**/*
+    - providers/tests/system/**/*
 
   area:API:
     - airflow/api/**/*
@@ -611,10 +611,10 @@ labelPRBasedOnFilePath:
     - docs/apache-airflow/administration-and-deployment/lineage.rst
 
   area:Logging:
-    - airflow/providers/**/log/*
+    - providers/src/airflow/providers/**/log/*
     - airflow/utils/log/**/*
     - docs/apache-airflow/administration-and-deployment/logging-monitoring/logging-*.rst
-    - tests/providers/**/log/*
+    - providers/tests/**/log/*
     - tests/utils/log/**/*
 
   area:Plugins:
@@ -638,9 +638,9 @@ labelPRBasedOnFilePath:
 
   area:Secrets:
     - airflow/secrets/**/*
-    - airflow/providers/**/secrets/*
+    - providers/src/airflow/providers/**/secrets/*
     - tests/secrets/**/*
-    - tests/providers/**/secrets/*
+    - providers/tests/**/secrets/*
     - docs/apache-airflow/security/secrets/**/*
 
   area:Triggerer:
diff --git a/.github/workflows/basic-tests.yml b/.github/workflows/basic-tests.yml
index 49d6a7245bc88..509d7066f6d38 100644
--- a/.github/workflows/basic-tests.yml
+++ b/.github/workflows/basic-tests.yml
@@ -195,9 +195,10 @@ jobs:
         env:
           HATCH_ENV: "test"
         working-directory: ./clients/python
-      - name: "Prepare FAB provider packages: wheel"
+      - name: "Prepare FAB+standard provider packages: wheel"
         run: >
-          breeze release-management prepare-provider-packages fab --package-format wheel --skip-tag-check
+          breeze release-management prepare-provider-packages fab standard \
+          --package-format wheel --skip-tag-check
       - name: "Install Airflow with fab for webserver tests"
         run: pip install . dist/apache_airflow_providers_fab-*.whl
       - name: "Prepare Standard provider packages: wheel"
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ce557dba431b6..748a85e860bb7 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -172,7 +172,7 @@ repos:
         name: Check and update common.sql API stubs
         entry: ./scripts/ci/pre_commit/update_common_sql_api_stubs.py
         language: python
-        files: ^scripts/ci/pre_commit/update_common_sql_api\.py|^airflow/providers/common/sql/.*\.pyi?$
+        files: ^scripts/ci/pre_commit/update_common_sql_api\.py|^providers/src/airflow/providers/common/sql/.*\.pyi?$
         additional_dependencies: ['rich>=12.4.4', 'mypy==1.9.0', 'black==23.10.0', 'jinja2']
         pass_filenames: false
         require_serial: true
@@ -225,7 +225,7 @@
         entry: ./scripts/ci/pre_commit/check_deferrable_default.py
         pass_filenames: false
         additional_dependencies: ["libcst>=1.1.0"]
-        files: ^airflow/.*/sensors/.*\.py$|^airflow/.*/operators/.*\.py$
+        files: ^(providers/src/)?airflow/.*/(sensors|operators)/.*\.py$
   - repo: https://github.com/asottile/blacken-docs
     rev: 1.18.0
     hooks:
@@ -326,7 +326,7 @@
         exclude: ^.*/.*_vendor/|^airflow/www/static/css/material-icons\.css$|^images/.*$|^RELEASE_NOTES\.txt$|^.*package-lock\.json$|^.*/kinglear\.txt$|^.*pnpm-lock\.yaml$
         args:
           - --ignore-words=docs/spelling_wordlist.txt
-          - --skip=airflow/providers/*/*.rst,airflow/www/*.log,docs/*/commits.rst,docs/apache-airflow/tutorial/pipeline_example.csv,*.min.js,*.lock,INTHEWILD.md
+          - --skip=providers/src/airflow/providers/*/*.rst,airflow/www/*.log,docs/*/commits.rst,docs/apache-airflow/tutorial/pipeline_example.csv,*.min.js,*.lock,INTHEWILD.md
           - --exclude-file=.codespellignorelines
   - repo: local
     # Note that this is the 2nd "local" repo group in the .pre-commit-config.yaml file. This is because
@@ -343,7 +343,7 @@ repos:
         language: python
        entry: ./scripts/ci/pre_commit/validate_operators_init.py
         pass_filenames: true
-        files: ^airflow/providers/.*/(operators|transfers|sensors)/.*\.py$
+        files: ^providers/src/airflow/providers/.*/(operators|transfers|sensors)/.*\.py$
         additional_dependencies: [ 'rich>=12.4.4' ]
       - id: ruff
         name: Run 'ruff' for extremely fast Python linting
@@ -418,7 +418,7 @@
         language: python
         files: ^airflow/.*\.py$
         require_serial: true
-        exclude: ^airflow/kubernetes/|^airflow/providers/
+        exclude: ^airflow/kubernetes/|^providers/src/airflow/providers/
         entry: ./scripts/ci/pre_commit/check_cncf_k8s_used_for_k8s_executor_only.py
         additional_dependencies: ['rich>=12.4.4']
       - id: check-airflow-provider-compatibility
@@ -426,7 +426,7 @@
         entry: ./scripts/ci/pre_commit/check_provider_airflow_compatibility.py
         language: python
         pass_filenames: true
-        files: ^airflow/providers/.*\.py$
+        files: ^providers/src/airflow/providers/.*\.py$
         additional_dependencies: ['rich>=12.4.4']
       - id: check-google-re2-as-dependency
         name: Check google-re2 declared as dep
@@ -435,7 +435,7 @@
         language: python
         pass_filenames: true
         require_serial: true
-        files: ^airflow/providers/.*\.py$
+        files: ^providers/src/airflow/providers/.*\.py$
         additional_dependencies: ['rich>=12.4.4']
       - id: update-local-yml-file
         name: Update mounts in the local yml file
@@ -449,13 +449,18 @@
         description: Check dependency of SQL Providers with common data structure
         entry: ./scripts/ci/pre_commit/check_common_sql_dependency.py
         language: python
-        files: ^airflow/providers/.*/hooks/.*\.py$
+        files: ^providers/src/airflow/providers/.*/hooks/.*\.py$
         additional_dependencies: ['rich>=12.4.4', 'pyyaml', 'packaging']
       - id: update-providers-dependencies
         name: Update dependencies for provider packages
         entry: ./scripts/ci/pre_commit/update_providers_dependencies.py
         language: python
-        files: ^airflow/providers/.*\.py$|^airflow/providers/.*/provider\.yaml$|^tests/providers/.*\.py$|^tests/system/providers/.*\.py$|^scripts/ci/pre_commit/update_providers_dependencies\.py$
+        files: |
+          (?x)
+          ^providers/src/airflow/providers/.*\.py$ |
+          ^providers/src/airflow/providers/.*/provider\.yaml$ |
+          ^providers/tests/.*\.py$ |
+          ^scripts/ci/pre_commit/update_providers_dependencies\.py$
         pass_filenames: false
         additional_dependencies: ['setuptools', 'rich>=12.4.4', 'pyyaml', 'tomli']
       - id: check-extra-packages-references
@@ -477,7 +477,7 @@
         name: Update extras in documentation
         entry: ./scripts/ci/pre_commit/insert_extras.py
         language: python
-        files: ^contributing-docs/12_airflow_dependencies_and_extras.rst$|^INSTALL$|^airflow/providers/.*/provider\.yaml$|^Dockerfile.*
+        files: ^contributing-docs/12_airflow_dependencies_and_extras.rst$|^INSTALL$|^providers/src/airflow/providers/.*/provider\.yaml$|^Dockerfile.*
         pass_filenames: false
         additional_dependencies: ['rich>=12.4.4', 'hatchling==1.25.0']
       - id: check-extras-order
@@ -510,9 +515,9 @@
           (?x)
           ^scripts/ci/pre_commit/version_heads_map\.py$|
           ^airflow/migrations/versions/.*$|^airflow/migrations/versions|
-          ^airflow/providers/fab/migrations/versions/.*$|^airflow/providers/fab/migrations/versions|
+          ^providers/src/airflow/providers/fab/migrations/versions/.*$|^providers/src/airflow/providers/fab/migrations/versions|
           ^airflow/utils/db.py$|
-          ^airflow/providers/fab/auth_manager/models/db.py$
+          ^providers/src/airflow/providers/fab/auth_manager/models/db.py$
         additional_dependencies: ['packaging','google-re2']
       - id: update-version
         name: Update versions in docs
@@ -533,11 +538,11 @@ repos:
         language: pygrep
         entry: >
          (?i)
-          .*https://github.*[0-9]/tests/system/providers|
-          .*https://github.*/main/tests/system/providers|
-          .*https://github.*/master/tests/system/providers|
-          .*https://github.*/main/airflow/providers/.*/example_dags/|
-          .*https://github.*/master/airflow/providers/.*/example_dags/
+          .*https://github.*[0-9]/providers/tests/system/|
+          .*https://github.*/main/providers/tests/system/|
+          .*https://github.*/master/providers/tests/system/|
+          .*https://github.*/main/providers/src/airflow/providers/.*/example_dags/|
+          .*https://github.*/master/providers/src/airflow/providers/.*/example_dags/
         pass_filenames: true
         files: ^docs/apache-airflow-providers-.*\.rst
       - id: check-safe-filter-usage-in-html
@@ -560,7 +565,7 @@
         description: Use AirflowProviderDeprecationWarning in providers
         entry: "^\\s*DeprecationWarning*"
         pass_filenames: true
-        files: ^airflow/providers/.*\.py$
+        files: ^providers/src/airflow/providers/.*\.py$
       - id: check-urlparse-usage-in-code
         language: pygrep
         name: Don't use urlparse in code
@@ -601,28 +606,28 @@
           ^airflow/config_templates/|
           ^airflow/models/baseoperator.py$|
           ^airflow/operators/__init__.py$|
-          ^airflow/providers/amazon/aws/hooks/emr.py$|
-          ^airflow/providers/amazon/aws/operators/emr.py$|
-          ^airflow/providers/apache/cassandra/hooks/cassandra.py$|
-          ^airflow/providers/apache/hive/operators/hive_stats.py$|
-          ^airflow/providers/apache/hive/transfers/vertica_to_hive.py$|
-          ^airflow/providers/apache/spark/decorators/|
-          ^airflow/providers/apache/spark/hooks/|
-          ^airflow/providers/apache/spark/operators/|
-          ^airflow/providers/exasol/hooks/exasol.py$|
-          ^airflow/providers/fab/auth_manager/security_manager/|
-          ^airflow/providers/google/cloud/hooks/bigquery.py$|
-          ^airflow/providers/google/cloud/operators/cloud_build.py$|
-          ^airflow/providers/google/cloud/operators/dataproc.py$|
-          ^airflow/providers/google/cloud/operators/mlengine.py$|
-          ^airflow/providers/microsoft/azure/hooks/cosmos.py$|
-          ^airflow/providers/microsoft/winrm/hooks/winrm.py$|
+          ^providers/src/airflow/providers/amazon/aws/hooks/emr.py$|
+          ^providers/src/airflow/providers/amazon/aws/operators/emr.py$|
+          ^providers/src/airflow/providers/apache/cassandra/hooks/cassandra.py$|
+          ^providers/src/airflow/providers/apache/hive/operators/hive_stats.py$|
+          ^providers/src/airflow/providers/apache/hive/transfers/vertica_to_hive.py$|
+          ^providers/src/airflow/providers/apache/spark/decorators/|
+          ^providers/src/airflow/providers/apache/spark/hooks/|
+          ^providers/src/airflow/providers/apache/spark/operators/|
+          ^providers/src/airflow/providers/exasol/hooks/exasol.py$|
+          ^providers/src/airflow/providers/fab/auth_manager/security_manager/|
+          ^providers/src/airflow/providers/google/cloud/hooks/bigquery.py$|
+          ^providers/src/airflow/providers/google/cloud/operators/cloud_build.py$|
+          ^providers/src/airflow/providers/google/cloud/operators/dataproc.py$|
+          ^providers/src/airflow/providers/google/cloud/operators/mlengine.py$|
+          ^providers/src/airflow/providers/microsoft/azure/hooks/cosmos.py$|
+          ^providers/src/airflow/providers/microsoft/winrm/hooks/winrm.py$|
           ^airflow/www/fab_security/manager.py$|
           ^docs/.*commits.rst$|
           ^docs/apache-airflow-providers-apache-cassandra/connections/cassandra.rst$|
-          ^airflow/providers/microsoft/winrm/operators/winrm.py$|
-          ^airflow/providers/opsgenie/hooks/opsgenie.py$|
-          ^airflow/providers/redis/provider.yaml$|
+          ^providers/src/airflow/providers/microsoft/winrm/operators/winrm.py$|
+          ^providers/src/airflow/providers/opsgenie/hooks/opsgenie.py$|
+
^providers/src/airflow/providers/redis/provider.yaml$| ^airflow/serialization/serialized_objects.py$| ^airflow/ui/pnpm-lock.yaml$| ^airflow/utils/db.py$| @@ -647,6 +652,7 @@ repos: ^newsfragments/41761.significant.rst$| ^scripts/ci/pre_commit/vendor_k8s_json_schema.py$| ^tests/| + ^providers/tests/| ^.pre-commit-config\.yaml$| ^.*CHANGELOG\.(rst|txt)$| ^.*RELEASE_NOTES\.rst$| @@ -682,7 +688,7 @@ repos: ^airflow/decorators/.*$| ^airflow/hooks/.*$| ^airflow/operators/.*$| - ^airflow/providers/.*$| + ^providers/src/airflow/providers/.*$| ^airflow/sensors/.*$| ^dev/provider_packages/.*$ - id: check-base-operator-usage @@ -697,7 +703,7 @@ repos: ^airflow/decorators/.*$| ^airflow/hooks/.*$| ^airflow/operators/.*$| - ^airflow/providers/.*$| + ^providers/src/airflow/providers/.*$| ^airflow/sensors/.*$| ^dev/provider_packages/.*$ - id: check-base-operator-usage @@ -708,16 +714,16 @@ repos: pass_filenames: true files: > (?x) - ^airflow/providers/.*\.py$ - exclude: ^.*/.*_vendor/|airflow/providers/standard/operators/bash.py + ^providers/src/airflow/providers/.*\.py$ + exclude: ^.*/.*_vendor/|providers/src/airflow/providers/standard/operators/bash.py - id: check-get-lineage-collector-providers language: python name: Check providers import hook lineage code from compat description: Make sure you import from airflow.provider.common.compat.lineage.hook instead of airflow.lineage.hook. entry: ./scripts/ci/pre_commit/check_get_lineage_collector_providers.py - files: ^airflow/providers/.*\.py$ - exclude: ^airflow/providers/common/compat/.*\.py$ + files: ^providers/src/airflow/providers/.*\.py$ + exclude: ^providers/src/airflow/providers/common/compat/.*\.py$ additional_dependencies: [ 'rich>=12.4.4' ] - id: check-decorated-operator-implements-custom-name name: Check @task decorator implements custom_operator_name @@ -730,7 +736,7 @@ repos: name: Verify usage of Airflow deprecation classes in core entry: category=DeprecationWarning|category=PendingDeprecationWarning files: \.py$ - exclude: ^airflow/configuration\.py$|^airflow/providers|^scripts/in_container/verify_providers\.py$|^tests/.*$ + exclude: ^airflow/configuration\.py$|^providers/src/airflow/providers/|^scripts/in_container/verify_providers\.py$|^(providers/)?tests/.*$|^dev/tests_common/ pass_filenames: true - id: check-provide-create-sessions-imports language: pygrep @@ -772,7 +778,7 @@ repos: name: Check if aiobotocore is an optional dependency only entry: ./scripts/ci/pre_commit/check_aiobotocore_optional.py language: python - files: ^airflow/providers/.*/provider\.yaml$ + files: ^providers/src/airflow/providers/.*/provider\.yaml$ pass_filenames: true additional_dependencies: ['click', 'rich>=12.4.4', 'pyyaml'] require_serial: true @@ -877,12 +883,6 @@ repos: entry: ./scripts/ci/pre_commit/compile_www_assets_dev.py pass_filenames: false additional_dependencies: ['yarn@1.22.21'] - - id: check-providers-init-file-missing - name: Provider init file is missing - pass_filenames: false - always_run: true - entry: ./scripts/ci/pre_commit/check_providers_init.py - language: python - id: check-providers-subpackages-init-file-exist name: Provider subpackage init files are there pass_filenames: false @@ -944,8 +944,8 @@ repos: name: Check if system tests have required segments of code entry: ./scripts/ci/pre_commit/check_system_tests.py language: python - files: ^tests/system/.*/example_[^/]*\.py$ - exclude: ^tests/system/providers/google/cloud/bigquery/example_bigquery_queries\.py$ + files: ^(providers/)?tests/system/.*/example_[^/]*\.py$ + exclude: 
^providers/tests/system/google/cloud/bigquery/example_bigquery_queries\.py$
        pass_filenames: true
        additional_dependencies: ['rich>=12.4.4']
      - id: generate-pypi-readme
@@ -1177,12 +1177,21 @@ repos:
        entry: "^\\s*from re\\s|^\\s*import re\\s"
        pass_filenames: true
        files: \.py$
-        exclude: ^airflow/providers|^dev/.*\.py$|^scripts/.*\.py$|^tests/|^\w+_tests/|^docs/.*\.py$|^airflow/utils/helpers.py$|^hatch_build.py$
+        exclude: |
+          (?x)
+          ^airflow/utils/helpers.py$ |
+          ^providers/src/airflow/providers/ |
+          ^(providers/)?tests/ |
+          ^dev/.*\.py$ |
+          ^scripts/.*\.py$ |
+          ^\w+_tests/ |
+          ^docs/.*\.py$ |
+          ^hatch_build.py$
      - id: check-provider-docs-valid
        name: Validate provider doc files
        entry: ./scripts/ci/pre_commit/check_provider_docs.py
        language: python
-        files: ^airflow/providers/.*/provider\.yaml|^docs/.*
+        files: ^providers/src/airflow/providers/.*/provider\.yaml|^docs/.*
        additional_dependencies: ['rich>=12.4.4', 'pyyaml', 'jinja2']
        require_serial: true
      - id: bandit
@@ -1273,9 +1282,20 @@ repos:
      - id: mypy-airflow
        name: Run mypy for airflow
        language: python
-        entry: ./scripts/ci/pre_commit/mypy.py --namespace-packages
+        entry: ./scripts/ci/pre_commit/mypy.py
        files: \.py$
-        exclude: ^.*/.*_vendor/|^airflow/migrations|^airflow/providers|^dev|^scripts|^docs|^provider_packages|^tests/providers|^tests/system/providers|^tests/dags/test_imports.py|^clients/python/test_.*\.py|^performance
+        exclude: |
+          (?x)
+          ^.*/.*_vendor/ |
+          ^airflow/migrations |
+          ^providers/ |
+          ^dev |
+          ^scripts |
+          ^docs |
+          ^provider_packages |
+          ^performance/ |
+          ^tests/dags/test_imports.py |
+          ^clients/python/test_.*\.py
        require_serial: true
        additional_dependencies: ['rich>=12.4.4']
      - id: mypy-airflow
@@ -1291,7 +1311,7 @@ repos:
        name: Run mypy for providers
        language: python
        entry: ./scripts/ci/pre_commit/mypy.py --namespace-packages
-        files: ^airflow/providers/.*\.py$|^tests/providers/.*\.py$|^tests/system/providers/.*\.py$
+        files: ^providers/src/airflow/providers/.*\.py$|^providers/tests/.*\.py$
        exclude: ^.*/.*_vendor/
        require_serial: true
        additional_dependencies: ['rich>=12.4.4']
@@ -1299,7 +1319,7 @@ repos:
        stages: ['manual']
        name: Run mypy for providers (manual)
        language: python
-        entry: ./scripts/ci/pre_commit/mypy_folder.py airflow/providers
+        entry: ./scripts/ci/pre_commit/mypy_folder.py providers/src/airflow/providers
        pass_filenames: false
        files: ^.*\.py$
        require_serial: true
@@ -1325,14 +1345,14 @@ repos:
        name: Validate provider.yaml files
        entry: ./scripts/ci/pre_commit/check_provider_yaml_files.py
        language: python
-        files: ^airflow/providers/.*/provider\.yaml$
+        files: ^providers/src/airflow/providers/.*/provider\.yaml$
        additional_dependencies: ['rich>=12.4.4']
        require_serial: true
      - id: check-template-fields-valid
        name: Check templated fields mapped in operators/sensors
        language: python
        entry: ./scripts/ci/pre_commit/check_template_fields.py
-        files: ^airflow/.*/sensors/.*\.py$|^airflow/.*/operators/.*\.py$
+        files: ^(providers/src/)?airflow/.*/(sensors|operators)/.*\.py$
        additional_dependencies: [ 'rich>=12.4.4' ]
        require_serial: true
      - id: update-migration-references
diff --git a/Dockerfile b/Dockerfile
index 4525a717728a8..ce229c75facef 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -877,8 +877,13 @@ function install_airflow() {
    # Determine the installation_command_flags based on AIRFLOW_INSTALLATION_METHOD method
    local installation_command_flags
    if [[ ${AIRFLOW_INSTALLATION_METHOD} == "." 
]]; then
+        # We need _a_ file in there otherwise the editable install doesn't include anything in the .pth file
+        mkdir -p ./providers/src/airflow/providers/
+        touch ./providers/src/airflow/providers/__init__.py
+        trap 'rm -f ./providers/src/airflow/providers/__init__.py 2>/dev/null' EXIT
+
        # When installing from sources - we always use `--editable` mode
-        installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}"
+        installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./providers"
    elif [[ ${AIRFLOW_INSTALLATION_METHOD} == "apache-airflow" ]]; then
        installation_command_flags="apache-airflow[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}"
    elif [[ ${AIRFLOW_INSTALLATION_METHOD} == apache-airflow\ @\ * ]]; then
diff --git a/Dockerfile.ci b/Dockerfile.ci
index 304d2a4a2d46d..a3e982614de69 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -676,8 +676,13 @@ function install_airflow() {
    # Determine the installation_command_flags based on AIRFLOW_INSTALLATION_METHOD method
    local installation_command_flags
    if [[ ${AIRFLOW_INSTALLATION_METHOD} == "." ]]; then
+        # We need _a_ file in there otherwise the editable install doesn't include anything in the .pth file
+        mkdir -p ./providers/src/airflow/providers/
+        touch ./providers/src/airflow/providers/__init__.py
+        trap 'rm -f ./providers/src/airflow/providers/__init__.py 2>/dev/null' EXIT
+
        # When installing from sources - we always use `--editable` mode
-        installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}"
+        installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./providers"
    elif [[ ${AIRFLOW_INSTALLATION_METHOD} == "apache-airflow" ]]; then
        installation_command_flags="apache-airflow[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}"
    elif [[ ${AIRFLOW_INSTALLATION_METHOD} == apache-airflow\ @\ * ]]; then
@@ -1350,6 +1355,7 @@ RUN bash /scripts/docker/install_pipx_tools.sh
 # We copy pyproject.toml and other files needed to perform setup of dependencies
 # So in case pyproject.toml changes we can install latest dependencies required.
COPY pyproject.toml ${AIRFLOW_SOURCES}/pyproject.toml +COPY providers/pyproject.toml ${AIRFLOW_SOURCES}/providers/pyproject.toml COPY airflow/__init__.py ${AIRFLOW_SOURCES}/airflow/ COPY generated/* ${AIRFLOW_SOURCES}/generated/ COPY constraints/* ${AIRFLOW_SOURCES}/constraints/ diff --git a/airflow/models/dag.py b/airflow/models/dag.py index 246e0e95ec617..2dc425daa0549 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -113,7 +113,6 @@ clear_task_instances, ) from airflow.models.tasklog import LogTemplate -from airflow.providers.fab import __version__ as FAB_VERSION from airflow.secrets.local_filesystem import LocalFilesystemBackend from airflow.security import permissions from airflow.settings import json @@ -796,6 +795,9 @@ def _upgrade_outdated_dag_access_control(access_control=None): """Look for outdated dag level actions in DAG access_controls and replace them with updated actions.""" if access_control is None: return None + + from airflow.providers.fab import __version__ as FAB_VERSION + updated_access_control = {} for role, perms in access_control.items(): if packaging_version.parse(FAB_VERSION) >= packaging_version.parse("1.3.0"): diff --git a/airflow/providers/.gitignore b/airflow/providers/.gitignore deleted file mode 100644 index 9b4a1a9d8f3ed..0000000000000 --- a/airflow/providers/.gitignore +++ /dev/null @@ -1 +0,0 @@ -get_provider_info.py diff --git a/airflow/providers_manager.py b/airflow/providers_manager.py index 2c673063cb23e..1b1ca469f21d5 100644 --- a/airflow/providers_manager.py +++ b/airflow/providers_manager.py @@ -31,7 +31,7 @@ from dataclasses import dataclass from functools import wraps from time import perf_counter -from typing import TYPE_CHECKING, Any, Callable, MutableMapping, NamedTuple, NoReturn, TypeVar +from typing import TYPE_CHECKING, Any, Callable, MutableMapping, NamedTuple, TypeVar from packaging.utils import canonicalize_name @@ -362,7 +362,7 @@ def _correctness_check(provider_package: str, class_name: str, provider_info: Pr # We want to have better control over initialization of parameters and be able to debug and test it # So we add our own decorator -def provider_info_cache(cache_name: str) -> Callable[[Callable[PS, NoReturn]], Callable[PS, None]]: +def provider_info_cache(cache_name: str) -> Callable[[Callable[PS, None]], Callable[PS, None]]: """ Decorate and cache provider info. 
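The typing fix in the hunk above (and in the inner decorator in the hunk that follows) matters because ``NoReturn`` asserts that a callable never returns normally, i.e. it always raises or exits. Under strict type checking a decorator declared to accept ``Callable[PS, NoReturn]`` would reject any ordinary initializer function, and every statement after a call to such a callable would be flagged as unreachable. ``None`` is the correct annotation for a function that returns normally without a value. A minimal, standalone sketch of the distinction (hypothetical function names):

.. code-block:: python

    from typing import NoReturn


    def never_returns() -> NoReturn:
        # A NoReturn callable must raise (or never terminate); type checkers
        # treat any code after a call to it as unreachable.
        raise RuntimeError("always fails")


    def returns_nothing() -> None:
        # Returns normally, just without a useful value -- this is what the
        # cached provider-info initializers actually do.
        return None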
@@ -370,7 +370,7 @@ def provider_info_cache(cache_name: str) -> Callable[[Callable[PS, NoReturn]], C
 
     :param cache_name: Name of the cache
     """
 
-    def provider_info_cache_decorator(func: Callable[PS, NoReturn]) -> Callable[PS, None]:
+    def provider_info_cache_decorator(func: Callable[PS, None]) -> Callable[PS, None]:
         @wraps(func)
         def wrapped_function(*args: PS.args, **kwargs: PS.kwargs) -> None:
             providers_manager_instance = args[0]
diff --git a/airflow/settings.py b/airflow/settings.py
index 7a805f64a29c7..a6adbbcf9ff77 100644
--- a/airflow/settings.py
+++ b/airflow/settings.py
@@ -377,7 +377,8 @@ def is_called_from_test_code(self) -> tuple[bool, traceback.FrameSummary | None]
             and not tb.filename == AIRFLOW_UTILS_SESSION_PATH
         ]
         if any(
-            filename.endswith("conftest.py") or filename.endswith("tests/test_utils/db.py")
+            filename.endswith("conftest.py")
+            or filename.endswith("dev/airflow_common_pytest/test_utils/db.py")
             for filename, _, _, _ in airflow_frames
         ):
             # This is a fixture call or testing utilities
diff --git a/contributing-docs/08_static_code_checks.rst b/contributing-docs/08_static_code_checks.rst
index 422a9f027e1ed..f064a13a773b6 100644
--- a/contributing-docs/08_static_code_checks.rst
+++ b/contributing-docs/08_static_code_checks.rst
@@ -212,8 +212,6 @@ require Breeze Docker image to be built locally.
 +-----------------------------------------------------------+--------------------------------------------------------+---------+
 | check-provider-yaml-valid                                 | Validate provider.yaml files                           | *       |
 +-----------------------------------------------------------+--------------------------------------------------------+---------+
-| check-providers-init-file-missing                         | Provider init file is missing                          |         |
-+-----------------------------------------------------------+--------------------------------------------------------+---------+
 | check-providers-subpackages-init-file-exist               | Provider subpackage init files are there               |         |
 +-----------------------------------------------------------+--------------------------------------------------------+---------+
 | check-pydevd-left-in-code                                 | Check for pydevd debug statements accidentally left    |         |
diff --git a/contributing-docs/testing/system_tests.rst b/contributing-docs/testing/system_tests.rst
index 867b89582f165..cc64d25e90cbc 100644
--- a/contributing-docs/testing/system_tests.rst
+++ b/contributing-docs/testing/system_tests.rst
@@ -35,7 +35,7 @@ Airflow system tests are pretty special because they serve three purposes:
 Old System Tests
 ----------------
 
-The system tests derive from the ``tests.test_utils.system_test_class.SystemTests`` class.
+The system tests derive from the ``dev.tests_common.test_utils.system_test_class.SystemTests`` class.
 
 Old versions of System tests should also be marked with ``@pytest.marker.system(SYSTEM)`` where ``system``
 designates the system to be tested (for example, ``google.cloud``). These tests are skipped by default.
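For orientation, applying the marker described above looks roughly like this (a minimal sketch with a hypothetical test body; note that the pytest API spells the decorator ``pytest.mark``, not ``pytest.marker``):

.. code-block:: python

    import pytest


    @pytest.mark.system("google.cloud")
    def test_old_style_system_behaviour():
        # Hypothetical old-style system test: "google.cloud" designates the
        # system under test; tests carrying this marker are skipped unless
        # system tests are explicitly enabled.
        ...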
diff --git a/contributing-docs/testing/unit_tests.rst b/contributing-docs/testing/unit_tests.rst
index dff3835a02fba..468f10b68d4c5 100644
--- a/contributing-docs/testing/unit_tests.rst
+++ b/contributing-docs/testing/unit_tests.rst
@@ -952,7 +952,7 @@ will ask you to rebuild the image if it is needed and some new dependencies shou
 
 .. code-block:: bash
 
-    breeze testing tests tests/providers/http/hooks/test_http.py tests/core/test_core.py --db-reset --log-cli-level=DEBUG
+    breeze testing tests providers/tests/http/hooks/test_http.py tests/core/test_core.py --db-reset --log-cli-level=DEBUG
 
 You can run the whole test suite without adding the test target:
 
@@ -1146,7 +1146,7 @@ directly to the container.
 
 .. code-block:: bash
 
-    pytest tests/providers//test.py
+    pytest providers/tests//test.py
 
 4. Iterate with the tests and providers. Both providers and tests are mounted from local sources so
    changes you do locally in both - tests and provider sources are immediately reflected inside the
@@ -1171,7 +1171,7 @@ are not part of the public API. We deal with it in one of the following ways:
 
 1) If the whole provider is supposed to only work for later airflow version, we remove the whole provider
    by excluding it from compatibility test configuration (see below)
 
-2) Some compatibility shims are defined in ``tests/test_utils/compat.py`` - and they can be used to make the
+2) Some compatibility shims are defined in ``dev/tests_common/test_utils/compat.py`` - and they can be used to make the
    tests compatible - for example importing ``ParseImportError`` after the exception has been renamed from
    ``ImportError`` and it would fail in Airflow 2.9, but we have a fallback import in ``compat.py`` that falls back to
    old import automatically, so all tests testing / expecting ``ParseImportError`` should import
@@ -1184,7 +1184,7 @@ are not part of the public API. We deal with it in one of the following ways:
 
 .. code-block:: python
 
-    from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
+    from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS
 
 
     @pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="The tests should be skipped for Airflow < 2.8")
@@ -1197,6 +1197,9 @@ are not part of the public API. We deal with it in one of the following ways:
 
 .. code-block:: python
 
+    from dev.tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES
+
+
     @pytest.mark.skipif(
         RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES, reason="Plugin initialization is done early in case of packages"
     )
@@ -1280,7 +1283,7 @@ In case you want to reproduce canary run, you need to add ``--clean-airflow-inst
 
 .. code-block:: bash
 
-    pytest tests/providers//test.py
+    pytest providers/tests//test.py
 
 7. Iterate with the tests
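For context, the fallback import that ``compat.py`` provides is essentially the following pattern (a sketch based on the ``ParseImportError`` example the document itself describes; the exact shim may differ):

.. code-block:: python

    try:
        # Newer Airflow versions, where the DAG-import-error model was renamed
        from airflow.models.errors import ParseImportError
    except ImportError:
        # Older Airflow (2.9 and earlier) only exposes the pre-rename name
        from airflow.models.errors import ImportError as ParseImportError  # type: ignore[no-redef]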
diff --git a/dev/breeze/doc/images/output_build-docs.svg b/dev/breeze/doc/images/output_build-docs.svg
index 0ddded9468a55..d52aa78d7ec1f 100644
[regenerated help screenshot for `breeze build-docs`; the visible option text is unchanged, only SVG styling attributes differ]
diff --git a/dev/breeze/doc/images/output_compile-ui-assets.svg b/dev/breeze/doc/images/output_compile-ui-assets.svg
index 4721274d9cf59..3b6304558479e 100644
[regenerated help screenshot for `breeze compile-ui-assets`; visible text unchanged]
diff --git a/dev/breeze/doc/images/output_release-management_add-back-references.svg b/dev/breeze/doc/images/output_release-management_add-back-references.svg
index b2b90c9e28728..37feab7831e79 100644
[regenerated help screenshot for `breeze release-management add-back-references`; visible text unchanged]
diff --git a/dev/breeze/doc/images/output_release-management_generate-issue-content-providers.svg b/dev/breeze/doc/images/output_release-management_generate-issue-content-providers.svg
index f07aa12760d92..62ed9b25dda68 100644
[regenerated help screenshot for `breeze release-management generate-issue-content-providers`; visible text unchanged]
diff --git a/dev/breeze/doc/images/output_release-management_prepare-provider-documentation.svg b/dev/breeze/doc/images/output_release-management_prepare-provider-documentation.svg
index 19a549cc5a955..6bf07f9bc7f3d 100644
[regenerated help screenshot for `breeze release-management prepare-provider-documentation`; visible text unchanged]
diff --git a/dev/breeze/doc/images/output_release-management_prepare-provider-packages.svg b/dev/breeze/doc/images/output_release-management_prepare-provider-packages.svg
index 16e211ec4202d..3c4e468aba0c7 100644
[regenerated help screenshot for `breeze release-management prepare-provider-packages`; visible text unchanged]
diff --git a/dev/breeze/doc/images/output_release-management_publish-docs.svg b/dev/breeze/doc/images/output_release-management_publish-docs.svg
index db974991886e7..7063e3e6fbff0 100644
[regenerated help screenshot for `breeze release-management publish-docs`; visible text unchanged]
diff --git a/dev/breeze/doc/images/output_sbom_generate-providers-requirements.svg b/dev/breeze/doc/images/output_sbom_generate-providers-requirements.svg
index 4f29325c201c1..4b29f11dd3fd9 100644
[regenerated help screenshot for `breeze sbom generate-providers-requirements`; visible text unchanged]
diff --git a/dev/breeze/doc/images/output_setup_check-all-params-in-groups.svg b/dev/breeze/doc/images/output_setup_check-all-params-in-groups.svg
index 47e73326a4af2..71feee5875aa1 100644
[regenerated help screenshot for `breeze setup check-all-params-in-groups`; visible text unchanged]
diff --git a/dev/breeze/doc/images/output_setup_regenerate-command-images.svg b/dev/breeze/doc/images/output_setup_regenerate-command-images.svg
index bd11568d5a174..83018c3d8d39f 100644
[regenerated help screenshot for `breeze setup regenerate-command-images`; visible text unchanged]
Cannot be used -together with --command flag or --force.                                                             +--check-onlyOnly check if some images need to be regenerated. Return 0 if no need or 1 if needed. Cannot be used +together with --command flag or --force.                                                             ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_static-checks.svg b/dev/breeze/doc/images/output_static-checks.svg index ade76225d105a..e3c89b304289b 100644 --- a/dev/breeze/doc/images/output_static-checks.svg +++ b/dev/breeze/doc/images/output_static-checks.svg @@ -351,9 +351,9 @@ check-persist-credentials-disabled-in-github-workflows |                          check-pre-commit-information-consistent | check-provide-create-sessions-imports | check-provider-docs-valid | check-provider-yaml-valid |                           -check-providers-init-file-missing | check-providers-subpackages-init-file-exist | -check-pydevd-left-in-code | check-revision-heads-map |                            -check-safe-filter-usage-in-html | check-sql-dependency-common-data-structure |    +check-providers-subpackages-init-file-exist | check-pydevd-left-in-code |         +check-revision-heads-map | check-safe-filter-usage-in-html |                      +check-sql-dependency-common-data-structure |                                      check-start-date-not-used-in-defaults | check-system-tests-present |              check-system-tests-tocs | check-taskinstance-tis-attrs |                          check-template-context-variable-in-sync | check-template-fields-valid |           diff --git a/dev/breeze/doc/images/output_static-checks.txt b/dev/breeze/doc/images/output_static-checks.txt index 0b4fae743bd10..e917996931d60 100644 --- a/dev/breeze/doc/images/output_static-checks.txt +++ b/dev/breeze/doc/images/output_static-checks.txt @@ -1 +1 @@ -769905ba0e5eea7f79c37b2d047699e0 +08a7e37cd651e4d1eb702cb347d9b061 diff --git a/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py b/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py index c6cc343ae4aee..f97b6b573a463 100644 --- a/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py @@ -607,7 +607,7 @@ def _rebuild_k8s_image( COPY --chown=airflow:0 airflow/example_dags/ /opt/airflow/dags/ -COPY --chown=airflow:0 airflow/providers/cncf/kubernetes/kubernetes_executor_templates/ /opt/airflow/pod_templates/ +COPY --chown=airflow:0 providers/src/airflow/providers/cncf/kubernetes/kubernetes_executor_templates/ /opt/airflow/pod_templates/ ENV GUNICORN_CMD_ARGS='--preload' AIRFLOW__WEBSERVER__WORKER_REFRESH_INTERVAL=0 """ diff --git a/dev/breeze/src/airflow_breeze/commands/testing_commands.py b/dev/breeze/src/airflow_breeze/commands/testing_commands.py index 
46ab710dbfa7b..61b260bea09ae 100644 --- a/dev/breeze/src/airflow_breeze/commands/testing_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/testing_commands.py @@ -791,7 +791,7 @@ def _run_test_command( perform_environment_checks() if skip_providers: ignored_path_list = [ - f"--ignore=tests/providers/{provider_id.replace('.','/')}" + f"--ignore=providers/tests/{provider_id.replace('.','/')}" for provider_id in skip_providers.split(" ") ] extra_pytest_args = (*extra_pytest_args, *ignored_path_list) diff --git a/dev/breeze/src/airflow_breeze/pre_commit_ids.py b/dev/breeze/src/airflow_breeze/pre_commit_ids.py index 03873ed8ac7bf..e684acd24ad2d 100644 --- a/dev/breeze/src/airflow_breeze/pre_commit_ids.py +++ b/dev/breeze/src/airflow_breeze/pre_commit_ids.py @@ -71,7 +71,6 @@ "check-provide-create-sessions-imports", "check-provider-docs-valid", "check-provider-yaml-valid", - "check-providers-init-file-missing", "check-providers-subpackages-init-file-exist", "check-pydevd-left-in-code", "check-revision-heads-map", diff --git a/dev/breeze/src/airflow_breeze/prepare_providers/provider_packages.py b/dev/breeze/src/airflow_breeze/prepare_providers/provider_packages.py index 88ad3e8c8cf3a..ebe8940294966 100644 --- a/dev/breeze/src/airflow_breeze/prepare_providers/provider_packages.py +++ b/dev/breeze/src/airflow_breeze/prepare_providers/provider_packages.py @@ -37,7 +37,7 @@ render_template, tag_exists_for_provider, ) -from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT +from airflow_breeze.utils.path_utils import AIRFLOW_PROVIDERS_SRC, AIRFLOW_SOURCES_ROOT from airflow_breeze.utils.run_utils import run_command LICENCE_RST = """ @@ -83,7 +83,7 @@ def copy_provider_sources_to_target(provider_id: str) -> Path: rmtree(target_provider_root_path, ignore_errors=True) target_provider_root_path.mkdir(parents=True) source_provider_sources_path = get_source_package_path(provider_id) - relative_provider_path = source_provider_sources_path.relative_to(AIRFLOW_SOURCES_ROOT) + relative_provider_path = source_provider_sources_path.relative_to(AIRFLOW_PROVIDERS_SRC) target_providers_sub_folder = target_provider_root_path / relative_provider_path get_console().print( f"[info]Copying provider sources: {source_provider_sources_path} -> {target_providers_sub_folder}" diff --git a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py index ef7779afd9749..618d064e0e41f 100644 --- a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py @@ -23,6 +23,7 @@ import os import re import sys +from functools import lru_cache from subprocess import DEVNULL, CalledProcessError, CompletedProcess from typing import TYPE_CHECKING @@ -91,6 +92,7 @@ ("generated", "/opt/airflow/generated"), ("hooks", "/opt/airflow/hooks"), ("logs", "/root/airflow/logs"), + ("providers", "/opt/airflow/providers"), ("pyproject.toml", "/opt/airflow/pyproject.toml"), ("scripts", "/opt/airflow/scripts"), ("scripts/docker/entrypoint_ci.sh", "/entrypoint"), @@ -502,6 +504,7 @@ def check_executable_entrypoint_permissions(quiet: bool = False): get_console().print("[success]Executable permissions on entrypoints are OK[/]") +@lru_cache def perform_environment_checks(quiet: bool = False): check_docker_is_running() check_docker_version(quiet) diff --git a/dev/breeze/src/airflow_breeze/utils/packages.py b/dev/breeze/src/airflow_breeze/utils/packages.py index 5c22c41e0ba6a..5ee2c2a3edfc3 100644 --- 
a/dev/breeze/src/airflow_breeze/utils/packages.py +++ b/dev/breeze/src/airflow_breeze/utils/packages.py @@ -36,7 +36,7 @@ ) from airflow_breeze.utils.console import get_console from airflow_breeze.utils.path_utils import ( - AIRFLOW_PROVIDERS_ROOT, + AIRFLOW_PROVIDERS_NS_PACKAGE, BREEZE_SOURCES_ROOT, DOCS_ROOT, GENERATED_PROVIDER_PACKAGES_DIR, @@ -382,7 +382,7 @@ def find_matching_long_package_names( def get_source_package_path(provider_id: str) -> Path: - return AIRFLOW_PROVIDERS_ROOT.joinpath(*provider_id.split(".")) + return AIRFLOW_PROVIDERS_NS_PACKAGE.joinpath(*provider_id.split(".")) def get_documentation_package_path(provider_id: str) -> Path: diff --git a/dev/breeze/src/airflow_breeze/utils/path_utils.py b/dev/breeze/src/airflow_breeze/utils/path_utils.py index 064a61771fb5b..4e510cb768009 100644 --- a/dev/breeze/src/airflow_breeze/utils/path_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/path_utils.py @@ -281,9 +281,11 @@ def find_airflow_sources_root_to_operate_on() -> Path: AIRFLOW_SOURCES_ROOT = find_airflow_sources_root_to_operate_on().resolve() AIRFLOW_WWW_DIR = AIRFLOW_SOURCES_ROOT / "airflow" / "www" AIRFLOW_UI_DIR = AIRFLOW_SOURCES_ROOT / "airflow" / "ui" -TESTS_PROVIDERS_ROOT = AIRFLOW_SOURCES_ROOT / "tests" / "providers" -SYSTEM_TESTS_PROVIDERS_ROOT = AIRFLOW_SOURCES_ROOT / "tests" / "system" / "providers" -AIRFLOW_PROVIDERS_ROOT = AIRFLOW_SOURCES_ROOT / "airflow" / "providers" +AIRFLOW_PROVIDERS_PROJECT = AIRFLOW_SOURCES_ROOT / "providers" +AIRFLOW_PROVIDERS_SRC = AIRFLOW_PROVIDERS_PROJECT / "src" +AIRFLOW_PROVIDERS_NS_PACKAGE = AIRFLOW_PROVIDERS_SRC / "airflow" / "providers" +TESTS_PROVIDERS_ROOT = AIRFLOW_PROVIDERS_PROJECT / "tests" +SYSTEM_TESTS_PROVIDERS_ROOT = AIRFLOW_PROVIDERS_PROJECT / "tests" / "system" DOCS_ROOT = AIRFLOW_SOURCES_ROOT / "docs" BUILD_CACHE_DIR = AIRFLOW_SOURCES_ROOT / ".build" GENERATED_DIR = AIRFLOW_SOURCES_ROOT / "generated" diff --git a/dev/breeze/src/airflow_breeze/utils/provider_dependencies.py b/dev/breeze/src/airflow_breeze/utils/provider_dependencies.py index cad78f1e6d2d9..72d07601601ab 100644 --- a/dev/breeze/src/airflow_breeze/utils/provider_dependencies.py +++ b/dev/breeze/src/airflow_breeze/utils/provider_dependencies.py @@ -23,7 +23,7 @@ from airflow_breeze.utils.console import get_console from airflow_breeze.utils.github import get_tag_date -from airflow_breeze.utils.path_utils import AIRFLOW_PROVIDERS_ROOT, PROVIDER_DEPENDENCIES_JSON_FILE_PATH +from airflow_breeze.utils.path_utils import AIRFLOW_PROVIDERS_NS_PACKAGE, PROVIDER_DEPENDENCIES_JSON_FILE_PATH DEPENDENCIES = json.loads(PROVIDER_DEPENDENCIES_JSON_FILE_PATH.read_text()) @@ -67,7 +67,7 @@ def generate_providers_metadata_for_package( ) -> dict[str, dict[str, str]]: get_console().print(f"[info]Generating metadata for {provider_id}") provider_yaml_dict = yaml.safe_load( - (AIRFLOW_PROVIDERS_ROOT.joinpath(*provider_id.split(".")) / "provider.yaml").read_text() + (AIRFLOW_PROVIDERS_NS_PACKAGE.joinpath(*provider_id.split(".")) / "provider.yaml").read_text() ) provider_metadata: dict[str, dict[str, str]] = {} last_airflow_version = START_AIRFLOW_VERSION_FROM diff --git a/dev/breeze/src/airflow_breeze/utils/publish_docs_helpers.py b/dev/breeze/src/airflow_breeze/utils/publish_docs_helpers.py index 8c5d63748cb74..bb5ccb1e82906 100644 --- a/dev/breeze/src/airflow_breeze/utils/publish_docs_helpers.py +++ b/dev/breeze/src/airflow_breeze/utils/publish_docs_helpers.py @@ -19,14 +19,19 @@ import json import os -from glob import glob from pathlib import Path from typing import Any 
+from airflow_breeze.utils.path_utils import ( + AIRFLOW_PROVIDERS_NS_PACKAGE, + AIRFLOW_SOURCES_ROOT, + SYSTEM_TESTS_PROVIDERS_ROOT, +) + CONSOLE_WIDTH = 180 -ROOT_DIR = Path(__file__).parents[5].resolve() -PROVIDER_DATA_SCHEMA_PATH = ROOT_DIR / "airflow" / "provider.yaml.schema.json" + +PROVIDER_DATA_SCHEMA_PATH = AIRFLOW_SOURCES_ROOT / "airflow" / "provider.yaml.schema.json" def _load_schema() -> dict[str, Any]: @@ -36,22 +41,17 @@ def _load_schema() -> dict[str, Any]: def _filepath_to_module(filepath: str): - return str(Path(filepath).relative_to(ROOT_DIR)).replace("/", ".") + # TODO: handle relative to providers project + return str(Path(filepath).relative_to(AIRFLOW_SOURCES_ROOT)).replace("/", ".") def _filepath_to_system_tests(filepath: str): - return str( - ROOT_DIR - / "tests" - / "system" - / "providers" - / Path(filepath).relative_to(ROOT_DIR / "airflow" / "providers") - ) + return str(SYSTEM_TESTS_PROVIDERS_ROOT / Path(filepath).relative_to(AIRFLOW_PROVIDERS_NS_PACKAGE)) def get_provider_yaml_paths(): """Returns list of provider.yaml files""" - return sorted(glob(f"{ROOT_DIR}/airflow/providers/**/provider.yaml", recursive=True)) + return sorted(AIRFLOW_PROVIDERS_NS_PACKAGE.glob("**/provider.yaml")) def pretty_format_path(path: str, start: str) -> str: diff --git a/dev/breeze/src/airflow_breeze/utils/run_tests.py b/dev/breeze/src/airflow_breeze/utils/run_tests.py index 840bf2fcad6d2..23fe5b6e1965e 100644 --- a/dev/breeze/src/airflow_breeze/utils/run_tests.py +++ b/dev/breeze/src/airflow_breeze/utils/run_tests.py @@ -25,7 +25,7 @@ from airflow_breeze.global_constants import PIP_VERSION from airflow_breeze.utils.console import Output, get_console from airflow_breeze.utils.packages import get_excluded_provider_folders, get_suspended_provider_folders -from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT +from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT, TESTS_PROVIDERS_ROOT from airflow_breeze.utils.run_utils import run_command from airflow_breeze.utils.virtualenv_utils import create_temp_venv @@ -113,9 +113,9 @@ def get_ignore_switches_for_provider(provider_folders: list[str]) -> list[str]: for providers in provider_folders: args.extend( [ - f"--ignore=tests/providers/{providers}", - f"--ignore=tests/system/providers/{providers}", - f"--ignore=tests/integration/providers/{providers}", + f"--ignore=providers/tests/{providers}", + f"--ignore=providers/tests/system/{providers}", + f"--ignore=providers/tests/integration/{providers}", ] ) return args @@ -161,7 +161,7 @@ def get_excluded_provider_args(python_version: str) -> list[str]: "tests/operators/test_python.py::TestPythonVirtualenvOperator::test_airflow_context", "--assert=plain", ], - "Providers": ["tests/providers"], + "Providers": ["providers/tests"], "PythonVenv": [ "tests/operators/test_python.py::TestPythonVirtualenvOperator", ], @@ -182,11 +182,10 @@ def get_excluded_provider_args(python_version: str) -> list[str]: NO_RECURSE_DIRS = [ "tests/_internals", "tests/dags_with_system_exit", - "tests/test_utils", "tests/dags_corrupted", "tests/dags", - "tests/system/providers/google/cloud/dataproc/resources", - "tests/system/providers/google/cloud/gcs/resources", + "providers/tests/system/google/cloud/dataproc/resources", + "providers/tests/system/google/cloud/gcs/resources", ] @@ -255,16 +254,16 @@ def convert_test_type_to_pytest_args( providers_with_exclusions = TEST_TYPE_MAP_TO_PYTEST_ARGS["Providers"].copy() for excluded_provider in excluded_provider_list: providers_with_exclusions.append( - 
"--ignore=tests/providers/" + excluded_provider.replace(".", "/") + "--ignore=providers/tests/" + excluded_provider.replace(".", "/") ) return providers_with_exclusions if test_type.startswith(PROVIDERS_LIST_PREFIX): provider_list = test_type[len(PROVIDERS_LIST_PREFIX) : -1].split(",") providers_to_test = [] for provider in provider_list: - provider_path = "tests/providers/" + provider.replace(".", "/") - if (AIRFLOW_SOURCES_ROOT / provider_path).is_dir(): - providers_to_test.append(provider_path) + provider_path = TESTS_PROVIDERS_ROOT.joinpath(provider.replace(".", "/")) + if provider_path.is_dir(): + providers_to_test.append(provider_path.relative_to(AIRFLOW_SOURCES_ROOT).as_posix()) else: get_console().print( f"[error]Provider directory {provider_path} does not exist for {provider}. " @@ -346,9 +345,9 @@ def generate_args_for_pytest( if run_db_tests_only: args.append("--run-db-tests-only") if test_type != "System": - args.append(f"--ignore={SYSTEM_TESTS}") + args.append(f"--ignore-glob=*/{SYSTEM_TESTS}") if test_type != "Integration": - args.append(f"--ignore={INTEGRATION_TESTS}") + args.append(f"--ignore-glob=*/{INTEGRATION_TESTS}") if test_type != "Helm": # do not produce warnings output for helm tests args.append(f"--warning-output-path={warnings_file}") @@ -402,12 +401,12 @@ def convert_parallel_types_to_folders( ) ) # leave only folders, strip --pytest-args that exclude some folders with `-' prefix - folders = [arg for arg in args if arg.startswith("test")] - # remove specific provider sub-folders if "tests/providers" is already in the list + folders = [arg for arg in args if arg.startswith("test") or arg.startswith("providers/tests")] + # remove specific provider sub-folders if "providers/tests" is already in the list # This workarounds pytest issues where it will only run tests from specific subfolders # if both parent and child folders are in the list # The issue in Pytest (changed behaviour in Pytest 8.2 is tracked here # https://github.com/pytest-dev/pytest/issues/12605 - if "tests/providers" in folders: - folders = [folder for folder in folders if not folder.startswith("tests/providers/")] + if "providers/tests" in folders: + folders = [folder for folder in folders if not folder.startswith("providers/tests/")] return folders diff --git a/dev/breeze/src/airflow_breeze/utils/selective_checks.py b/dev/breeze/src/airflow_breeze/utils/selective_checks.py index 385cd6f14411a..dfc24993f274a 100644 --- a/dev/breeze/src/airflow_breeze/utils/selective_checks.py +++ b/dev/breeze/src/airflow_breeze/utils/selective_checks.py @@ -59,7 +59,7 @@ from airflow_breeze.utils.kubernetes_utils import get_kubernetes_python_combos from airflow_breeze.utils.packages import get_available_packages from airflow_breeze.utils.path_utils import ( - AIRFLOW_PROVIDERS_ROOT, + AIRFLOW_PROVIDERS_NS_PACKAGE, AIRFLOW_SOURCES_ROOT, DOCS_DIR, SYSTEM_TESTS_PROVIDERS_ROOT, @@ -177,7 +177,7 @@ def __hash__(self): r"^\.github/SECURITY\.rst$", r"^airflow/.*\.py$", r"^chart", - r"^providers", + r"^providers/src/", r"^tests/system", r"^CHANGELOG\.txt", r"^airflow/config_templates/config\.yml", @@ -200,9 +200,9 @@ def __hash__(self): FileGroupForCi.KUBERNETES_FILES: [ r"^chart", r"^kubernetes_tests", - r"^airflow/providers/cncf/kubernetes/", - r"^tests/providers/cncf/kubernetes/", - r"^tests/system/providers/cncf/kubernetes/", + r"^providers/src/airflow/providers/cncf/kubernetes/", + r"^providers/tests/cncf/kubernetes/", + r"^providers/tests/system/cncf/kubernetes/", ], FileGroupForCi.ALL_PYTHON_FILES: [ r".*\.py$", @@ 
-211,9 +211,9 @@ def __hash__(self): r".*\.py$", ], FileGroupForCi.ALL_PROVIDERS_PYTHON_FILES: [ - r"^airflow/providers/.*\.py$", - r"^tests/providers/.*\.py$", - r"^tests/system/providers/.*\.py$", + r"^providers/src/airflow/providers/.*\.py$", + r"^providers/tests/.*\.py$", + r"^providers/tests/system/.*\.py$", ], FileGroupForCi.ALL_DOCS_PYTHON_FILES: [ r"^docs/.*\.py$", @@ -225,6 +225,8 @@ def __hash__(self): r"^.pre-commit-config.yaml$", r"^airflow", r"^chart", + r"^providers/src/", + r"^providers/tests/", r"^tests", r"^kubernetes_tests", ], @@ -239,7 +241,7 @@ def __hash__(self): ], FileGroupForCi.TESTS_UTILS_FILES: [ r"^tests/utils/", - r"^tests/test_utils/", + r"^dev/tests_common/.*\.py$", ], } ) @@ -249,12 +251,12 @@ def __hash__(self): FileGroupForCi.ALL_AIRFLOW_PYTHON_FILES: [ r"^.*/.*_vendor/.*", r"^airflow/migrations/.*", - r"^airflow/providers/.*", + r"^providers/src/airflow/providers/.*", r"^dev/.*", r"^docs/.*", r"^provider_packages/.*", - r"^tests/providers/.*", - r"^tests/system/providers/.*", + r"^providers/tests/.*", + r"^providers/tests/system/.*", r"^tests/dags/test_imports.py", ] } @@ -286,9 +288,9 @@ def __hash__(self): r"^tests/operators/", ], SelectiveUnitTestTypes.PROVIDERS: [ - r"^airflow/providers/", - r"^tests/system/providers/", - r"^tests/providers/", + r"^providers/src/airflow/providers/", + r"^providers/tests/system/", + r"^providers/tests/", ], SelectiveUnitTestTypes.SERIALIZATION: [ r"^airflow/serialization/", @@ -308,7 +310,7 @@ def __hash__(self): def find_provider_affected(changed_file: str, include_docs: bool) -> str | None: file_path = AIRFLOW_SOURCES_ROOT / changed_file # is_relative_to is only available in Python 3.9 - we should simplify this check when we are Python 3.9+ - for provider_root in (TESTS_PROVIDERS_ROOT, SYSTEM_TESTS_PROVIDERS_ROOT, AIRFLOW_PROVIDERS_ROOT): + for provider_root in (TESTS_PROVIDERS_ROOT, SYSTEM_TESTS_PROVIDERS_ROOT, AIRFLOW_PROVIDERS_NS_PACKAGE): try: file_path.relative_to(provider_root) relative_base_path = provider_root @@ -329,7 +331,7 @@ def find_provider_affected(changed_file: str, include_docs: bool) -> str | None: if parent_dir_path == relative_base_path: break relative_path = parent_dir_path.relative_to(relative_base_path) - if (AIRFLOW_PROVIDERS_ROOT / relative_path / "provider.yaml").exists(): + if (AIRFLOW_PROVIDERS_NS_PACKAGE / relative_path / "provider.yaml").exists(): return str(parent_dir_path.relative_to(relative_base_path)).replace(os.sep, ".") # If we got here it means that some "common" files were modified. 
so we need to test all Providers return "Providers" diff --git a/dev/breeze/tests/test_packages.py b/dev/breeze/tests/test_packages.py index a3126b3f55499..e828f24cf1bb0 100644 --- a/dev/breeze/tests/test_packages.py +++ b/dev/breeze/tests/test_packages.py @@ -47,7 +47,7 @@ get_suspended_provider_ids, validate_provider_info_with_runtime_schema, ) -from airflow_breeze.utils.path_utils import AIRFLOW_PROVIDERS_ROOT, AIRFLOW_SOURCES_ROOT, DOCS_ROOT +from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT, DOCS_ROOT def test_get_available_packages(): @@ -151,7 +151,9 @@ def test_find_matching_long_package_name_bad_filter(): def test_get_source_package_path(): - assert get_source_package_path("apache.hdfs") == AIRFLOW_PROVIDERS_ROOT / "apache" / "hdfs" + assert get_source_package_path("apache.hdfs") == AIRFLOW_SOURCES_ROOT.joinpath( + "providers", "src", "airflow", "providers", "apache", "hdfs" + ) def test_get_documentation_package_path(): @@ -318,9 +320,12 @@ def test_get_provider_details(): assert provider_details.provider_id == "asana" assert provider_details.full_package_name == "airflow.providers.asana" assert provider_details.pypi_package_name == "apache-airflow-providers-asana" - assert ( - provider_details.source_provider_package_path - == AIRFLOW_SOURCES_ROOT / "airflow" / "providers" / "asana" + assert provider_details.source_provider_package_path == AIRFLOW_SOURCES_ROOT.joinpath( + "providers", + "src", + "airflow", + "providers", + "asana", ) assert ( provider_details.documentation_provider_package_path == DOCS_ROOT / "apache-airflow-providers-asana" @@ -496,7 +501,7 @@ def test_provider_jinja_context(): "RELEASE_NO_LEADING_ZEROS": version, "VERSION_SUFFIX": ".rc1", "PROVIDER_DESCRIPTION": "Amazon integration (including `Amazon Web Services (AWS) `__).\n", - "CHANGELOG_RELATIVE_PATH": "../../airflow/providers/amazon", + "CHANGELOG_RELATIVE_PATH": "../../providers/src/airflow/providers/amazon", "SUPPORTED_PYTHON_VERSIONS": ["3.9", "3.10", "3.11", "3.12"], "PLUGINS": [], "MIN_AIRFLOW_VERSION": "2.8.0", diff --git a/dev/breeze/tests/test_provider_documentation.py b/dev/breeze/tests/test_provider_documentation.py index e2de9fee9fbf3..db770b7856a2c 100644 --- a/dev/breeze/tests/test_provider_documentation.py +++ b/dev/breeze/tests/test_provider_documentation.py @@ -217,7 +217,7 @@ def test_convert_git_changes_to_table(input: str, output: str, markdown: bool, c def test_verify_changelog_exists(): assert ( _verify_changelog_exists("asana") - == AIRFLOW_SOURCES_ROOT / "airflow" / "providers" / "asana" / "CHANGELOG.rst" + == AIRFLOW_SOURCES_ROOT / "providers" / "src" / "airflow" / "providers" / "asana" / "CHANGELOG.rst" ) diff --git a/dev/breeze/tests/test_pytest_args_for_test_types.py b/dev/breeze/tests/test_pytest_args_for_test_types.py index 7ecbbf4b5bf3c..de636a9893d27 100644 --- a/dev/breeze/tests/test_pytest_args_for_test_types.py +++ b/dev/breeze/tests/test_pytest_args_for_test_types.py @@ -74,7 +74,7 @@ ), ( "Providers", - ["tests/providers"], + ["providers/tests"], False, ), ( @@ -84,26 +84,26 @@ ), ( "Providers[amazon]", - ["tests/providers/amazon"], + ["providers/tests/amazon"], False, ), ( "Providers[common.io]", - ["tests/providers/common/io"], + ["providers/tests/common/io"], False, ), ( "Providers[amazon,google,apache.hive]", - ["tests/providers/amazon", "tests/providers/google", "tests/providers/apache/hive"], + ["providers/tests/amazon", "providers/tests/google", "providers/tests/apache/hive"], False, ), ( "Providers[-amazon,google,microsoft.azure]", [ - 
"tests/providers", - "--ignore=tests/providers/amazon", - "--ignore=tests/providers/google", - "--ignore=tests/providers/microsoft/azure", + "providers/tests", + "--ignore=providers/tests/amazon", + "--ignore=providers/tests/google", + "--ignore=providers/tests/microsoft/azure", ], False, ), @@ -269,7 +269,7 @@ def test_pytest_args_for_helm_test_types(helm_test_package: str, pytest_args: li "tests/models", "tests/ti_deps", "tests/utils", - "tests/providers", + "providers/tests", ], False, ), @@ -282,7 +282,7 @@ def test_pytest_args_for_helm_test_types(helm_test_package: str, pytest_args: li "tests/models", "tests/ti_deps", "tests/utils", - "tests/providers/amazon", + "providers/tests/amazon", ], False, ), @@ -295,8 +295,8 @@ def test_pytest_args_for_helm_test_types(helm_test_package: str, pytest_args: li "tests/models", "tests/ti_deps", "tests/utils", - "tests/providers/amazon", - "tests/providers/google", + "providers/tests/amazon", + "providers/tests/google", ], False, ), @@ -309,7 +309,7 @@ def test_pytest_args_for_helm_test_types(helm_test_package: str, pytest_args: li "tests/models", "tests/ti_deps", "tests/utils", - "tests/providers", + "providers/tests", ], False, ), @@ -334,7 +334,7 @@ def test_pytest_args_for_helm_test_types(helm_test_package: str, pytest_args: li "tests/models", "tests/ti_deps", "tests/utils", - "tests/providers", + "providers/tests", ], False, ), diff --git a/dev/breeze/tests/test_run_test_args.py b/dev/breeze/tests/test_run_test_args.py index 66d57c8d7822e..58e341e4dfa12 100644 --- a/dev/breeze/tests/test_run_test_args.py +++ b/dev/breeze/tests/test_run_test_args.py @@ -75,7 +75,7 @@ def test_irregular_provider_with_extra_ignore_should_be_valid_cmd(mock_run_comma _run_test( shell_params=ShellParams(test_type="Providers"), - extra_pytest_args=(f"--ignore=tests/providers/{fake_provider_name}",), + extra_pytest_args=(f"--ignore=providers/tests/{fake_provider_name}",), python_version="3.9", output=None, test_timeout=60, @@ -88,12 +88,12 @@ def test_irregular_provider_with_extra_ignore_should_be_valid_cmd(mock_run_comma arg_str = " ".join(run_cmd_call.args[0]) # The command pattern we look for is " \ - # <*other args we don't care about*> --ignore tests/providers/ \ - # --ignore tests/system/providers/ --ignore tests/integration/providers/" + # <*other args we don't care about*> --ignore providers/tests/ \ + # --ignore providers/tests/system/ --ignore providers/tests/integration/" # (the container id is simply to anchor the pattern so we know where we are starting; _run_tests should # be refactored to make arg testing easier but until then we have to regex-test the entire command string match_pattern = re.compile( - f" airflow tests/providers .+ --ignore=tests/providers/{fake_provider_name} --ignore=tests/system/providers/{fake_provider_name} --ignore=tests/integration/providers/{fake_provider_name}" + f" airflow providers/tests .+ --ignore=providers/tests/{fake_provider_name} --ignore=providers/tests/system/{fake_provider_name} --ignore=providers/tests/integration/{fake_provider_name}" ) assert match_pattern.search(arg_str) @@ -104,7 +104,7 @@ def test_primary_test_arg_is_excluded_by_extra_pytest_arg(mock_run_command): test_provider_not_skipped = "ftp" _run_test( shell_params=ShellParams(test_type=f"Providers[{test_provider},{test_provider_not_skipped}]"), - extra_pytest_args=(f"--ignore=tests/providers/{test_provider}",), + extra_pytest_args=(f"--ignore=providers/tests/{test_provider}",), python_version="3.9", output=None, test_timeout=60, @@ -116,13 +116,13 @@ 
def test_primary_test_arg_is_excluded_by_extra_pytest_arg(mock_run_command): arg_str = " ".join(run_cmd_call.args[0]) # The command pattern we look for is " --verbosity=0 \ - # <*other args we don't care about*> --ignore=tests/providers/" - # The tests/providers/http argument has been eliminated by the code that preps the args; this is a bug, + # <*other args we don't care about*> --ignore=providers/tests/" + # The providers/tests/http argument has been eliminated by the code that preps the args; this is a bug, # bc without a directory or module arg, pytest tests everything (which we don't want!) # We check "--verbosity=0" to ensure nothing is between the airflow container id and the verbosity arg, # IOW that the primary test arg is removed match_pattern = re.compile( - f"airflow tests/providers/{test_provider_not_skipped} --verbosity=0 .+ --ignore=tests/providers/{test_provider}" + f"airflow providers/tests/{test_provider_not_skipped} --verbosity=0 .+ --ignore=providers/tests/{test_provider}" ) assert match_pattern.search(arg_str) @@ -135,7 +135,7 @@ def test_test_is_skipped_if_all_are_ignored(mock_run_command): ] # "Providers[]" scans the source tree so we need to use a real provider id _run_test( shell_params=ShellParams(test_type=f"Providers[{','.join(test_providers)}]"), - extra_pytest_args=[f"--ignore=tests/providers/{provider}" for provider in test_providers], + extra_pytest_args=[f"--ignore=providers/tests/{provider}" for provider in test_providers], python_version="3.9", output=None, test_timeout=60, diff --git a/dev/breeze/tests/test_selective_checks.py b/dev/breeze/tests/test_selective_checks.py index 3b58a45ae1b9a..be3922d461963 100644 --- a/dev/breeze/tests/test_selective_checks.py +++ b/dev/breeze/tests/test_selective_checks.py @@ -87,7 +87,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): print_in_color("\nOutput received:") print_in_color(received_output_as_dict) print_in_color() - assert received_value == expected_value + assert received_value == expected_value, f"Correct value for {expected_key!r}" else: print( f"\n[red]ERROR: The key '{expected_key}' missing but " @@ -320,7 +320,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): pytest.param( ( "airflow/api/file.py", - "tests/providers/postgres/file.py", + "providers/tests/postgres/file.py", ), { "affected-providers-list-as-string": "amazon common.compat common.sql fab google openlineage " @@ -353,7 +353,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ), ( pytest.param( - ("tests/providers/apache/beam/file.py",), + ("providers/tests/apache/beam/file.py",), { "affected-providers-list-as-string": "apache.beam google", "all-python-versions": "['3.9']", @@ -410,7 +410,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): pytest.param( ( "chart/aaaa.txt", - "tests/providers/postgres/file.py", + "providers/tests/postgres/file.py", ), { "affected-providers-list-as-string": "amazon common.sql google openlineage " @@ -445,7 +445,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ( "INTHEWILD.md", "chart/aaaa.txt", - "tests/providers/http/file.py", + "providers/tests/http/file.py", ), { "affected-providers-list-as-string": "amazon apache.livy " @@ -482,7 +482,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ( "INTHEWILD.md", "chart/aaaa.txt", - "tests/providers/airbyte/file.py", + "providers/tests/airbyte/file.py", ), { 
"affected-providers-list-as-string": "airbyte", @@ -514,7 +514,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ( "INTHEWILD.md", "chart/aaaa.txt", - "tests/system/utils/file.py", + "foo/other.py", ), { "affected-providers-list-as-string": None, @@ -595,7 +595,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ) ), pytest.param( - ("airflow/providers/amazon/__init__.py",), + ("providers/src/airflow/providers/amazon/__init__.py",), { "affected-providers-list-as-string": "amazon apache.hive cncf.kubernetes " "common.compat common.sql exasol ftp google http imap microsoft.azure " @@ -623,7 +623,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): id="Providers tests run including amazon tests if amazon provider files changed", ), pytest.param( - ("tests/providers/airbyte/__init__.py",), + ("providers/tests/airbyte/__init__.py",), { "affected-providers-list-as-string": "airbyte", "all-python-versions": "['3.9']", @@ -647,7 +647,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): id="Providers tests run without amazon tests if no amazon file changed", ), pytest.param( - ("airflow/providers/amazon/file.py",), + ("providers/src/airflow/providers/amazon/file.py",), { "affected-providers-list-as-string": "amazon apache.hive cncf.kubernetes " "common.compat common.sql exasol ftp google http imap microsoft.azure " @@ -677,8 +677,8 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): pytest.param( ( "tests/always/test_project_structure.py", - "tests/providers/common/io/operators/__init__.py", - "tests/providers/common/io/operators/test_file_transfer.py", + "providers/tests/common/io/operators/__init__.py", + "providers/tests/common/io/operators/test_file_transfer.py", ), { "affected-providers-list-as-string": "common.compat common.io openlineage", @@ -703,9 +703,9 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): id="Only Always and common providers tests should run when only common.io and tests/always changed", ), pytest.param( - ("airflow/providers/standard/operators/bash.py",), + ("providers/src/airflow/providers/standard/operators/bash.py",), { - "affected-providers-list-as-string": "celery edge standard", + "affected-providers-list-as-string": "standard", "all-python-versions": "['3.9']", "all-python-versions-list-as-string": "3.9", "python-versions": "['3.9']", @@ -720,14 +720,14 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "skip-pre-commits": "identity,lint-helm-chart,mypy-airflow,mypy-dev,mypy-docs,mypy-providers," "ts-compile-format-lint-ui,ts-compile-format-lint-www", "upgrade-to-newer-dependencies": "false", - "parallel-test-types-list-as-string": "Always Core Providers[celery,edge,standard] Serialization", + "parallel-test-types-list-as-string": "Always Core Providers[standard] Serialization", "needs-mypy": "true", "mypy-folders": "['providers']", }, id="Providers standard tests and Serialization tests to run when airflow bash.py changed", ), pytest.param( - ("tests/operators/bash.py",), + ("providers/tests/standard/operators/bash.py",), { "affected-providers-list-as-string": None, "all-python-versions": "['3.9']", @@ -741,12 +741,12 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "run-amazon-tests": "false", "docs-build": "false", "run-kubernetes-tests": "false", - "skip-pre-commits": 
"check-provider-yaml-valid,identity,lint-helm-chart,mypy-airflow,mypy-dev,mypy-docs,mypy-providers," + "skip-pre-commits": "identity,lint-helm-chart,mypy-airflow,mypy-dev,mypy-docs,mypy-providers," "ts-compile-format-lint-ui,ts-compile-format-lint-www", "upgrade-to-newer-dependencies": "false", - "parallel-test-types-list-as-string": "Always Core Operators Serialization", + "parallel-test-types-list-as-string": "Always Core Providers[standard] Serialization", "needs-mypy": "true", - "mypy-folders": "['airflow']", + "mypy-folders": "['providers']", }, id="Force Core and Serialization tests to run when tests bash changed", ), @@ -776,6 +776,32 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): id="All tests should be run when tests/utils/ change", ) ), + ( + pytest.param( + ("dev/tests_common/__init__.py",), + { + "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, + "all-python-versions": "['3.9']", + "all-python-versions-list-as-string": "3.9", + "python-versions": "['3.9']", + "python-versions-list-as-string": "3.9", + "ci-image-build": "true", + "prod-image-build": "true", + "needs-helm-tests": "true", + "run-tests": "true", + "run-amazon-tests": "true", + "docs-build": "true", + "full-tests-needed": "true", + "skip-pre-commits": "identity,mypy-airflow,mypy-dev,mypy-docs,mypy-providers", + "upgrade-to-newer-dependencies": "false", + "parallel-test-types-list-as-string": ALL_CI_SELECTIVE_TEST_TYPES, + "providers-test-types-list-as-string": ALL_PROVIDERS_SELECTIVE_TEST_TYPES, + "needs-mypy": "true", + "mypy-folders": "['airflow', 'providers', 'docs', 'dev']", + }, + id="All tests should be run when dev/tests_common/ change", + ) + ), ], ) def test_expected_output_pull_request_main( @@ -1124,7 +1150,7 @@ def test_full_test_needed_when_scripts_changes(files: tuple[str, ...], expected_ ), ( pytest.param( - ("INTHEWILD.md", "tests/providers/asana.py"), + ("INTHEWILD.md", "providers/tests/asana.py"), ("full tests needed",), "v2-7-stable", { @@ -1200,7 +1226,7 @@ def test_expected_output_full_tests_needed( pytest.param( ( "chart/aaaa.txt", - "tests/providers/google/file.py", + "providers/tests/google/file.py", ), { "affected-providers-list-as-string": "amazon apache.beam apache.cassandra cncf.kubernetes " @@ -1229,7 +1255,7 @@ def test_expected_output_full_tests_needed( ( "airflow/cli/test.py", "chart/aaaa.txt", - "tests/providers/google/file.py", + "providers/tests/google/file.py", ), { "affected-providers-list-as-string": "amazon apache.beam apache.cassandra " @@ -1257,7 +1283,7 @@ def test_expected_output_full_tests_needed( pytest.param( ( "airflow/file.py", - "tests/providers/google/file.py", + "providers/tests/google/file.py", ), { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, @@ -1347,7 +1373,7 @@ def test_expected_output_pull_request_v2_7( ( "airflow/cli/test.py", "chart/aaaa.txt", - "tests/providers/google/file.py", + "providers/tests/google/file.py", ), { "affected-providers-list-as-string": "amazon apache.beam apache.cassandra " @@ -1619,7 +1645,7 @@ def test_no_commit_provided_trigger_full_build_for_any_event_type(github_event): id="pyproject.toml changed but no dependency change", ), pytest.param( - ("airflow/providers/microsoft/azure/provider.yaml",), + ("providers/src/airflow/providers/microsoft/azure/provider.yaml",), { "upgrade-to-newer-dependencies": "false", }, @@ -1677,9 +1703,9 @@ def test_upgrade_to_newer_dependencies( id="Google provider docs changed", ), pytest.param( - 
("airflow/providers/common/sql/common_sql_python.py",), + ("providers/src/airflow/providers/common/sql/common_sql_python.py",), { - "docs-list-as-string": "apache-airflow amazon apache.drill apache.druid apache.hive " + "docs-list-as-string": "amazon apache.drill apache.druid apache.hive " "apache.impala apache.pinot common.sql databricks elasticsearch " "exasol google jdbc microsoft.mssql mysql odbc openlineage " "oracle pgvector postgres presto slack snowflake sqlite teradata trino vertica ydb", @@ -1719,8 +1745,8 @@ def test_upgrade_to_newer_dependencies( id="Only Airflow docs changed", ), pytest.param( - ("airflow/providers/celery/file.py",), - {"docs-list-as-string": "apache-airflow celery cncf.kubernetes standard"}, + ("providers/src/airflow/providers/celery/file.py",), + {"docs-list-as-string": "celery cncf.kubernetes"}, id="Celery python files changed", ), pytest.param( @@ -2195,7 +2221,7 @@ def test_provider_compatibility_checks(labels: tuple[str, ...], expected_outputs id="Airflow mypy checks on airflow files with model changes.", ), pytest.param( - ("airflow/providers/a_file.py",), + ("providers/src/airflow/providers/a_file.py",), { "needs-mypy": "true", "mypy-folders": "['providers']", diff --git a/dev/example_dags/update_example_dags_paths.py b/dev/example_dags/update_example_dags_paths.py index 2104721d1bb2b..ac8cf4a0feec4 100755 --- a/dev/example_dags/update_example_dags_paths.py +++ b/dev/example_dags/update_example_dags_paths.py @@ -38,10 +38,10 @@ EXAMPLE_DAGS_URL_MATCHER = re.compile( - r"^(.*)(https://github.com/apache/airflow/tree/(.*)/airflow/providers/(.*)/example_dags)(/?\".*)$" + r"^(.*)(https://github.com/apache/airflow/tree/(.*)/providers/src/airflow/providers/(.*)/example_dags)(/?\".*)$" ) SYSTEM_TESTS_URL_MATCHER = re.compile( - r"^(.*)(https://github.com/apache/airflow/tree/(.*)/tests/system/providers/(.*))(/?\".*)$" + r"^(.*)(https://github.com/apache/airflow/tree/(.*)/providers/tests/system/(.*))(/?\".*)$" ) @@ -67,11 +67,11 @@ def replace_match(file: str, line: str, provider: str, version: str) -> str | No continue system_tests_url = ( f"https://github.com/apache/airflow/tree/providers-{provider}/{version}" - f"/tests/system/providers/{url_path_to_dir}" + f"/providers/tests/system/{url_path_to_dir}" ) example_dags_url = ( f"https://github.com/apache/airflow/tree/providers-{provider}/{version}" - f"/airflow/providers/{url_path_to_dir}/example_dags" + f"/providers/src/airflow/providers/{url_path_to_dir}/example_dags" ) if check_if_url_exists(system_tests_url) and index == 1: new_line = re.sub(matcher, r"\1" + system_tests_url + r"\5", line) diff --git a/dev/perf/scheduler_dag_execution_timing.py b/dev/perf/scheduler_dag_execution_timing.py index b11c73073df92..fc5c21bda7aa7 100755 --- a/dev/perf/scheduler_dag_execution_timing.py +++ b/dev/perf/scheduler_dag_execution_timing.py @@ -107,7 +107,7 @@ def get_executor_under_test(dotted_path): from airflow.executors.executor_loader import ExecutorLoader if dotted_path == "MockExecutor": - from tests.test_utils.mock_executor import MockExecutor as executor + from dev.tests_common.test_utils.mock_executor import MockExecutor as executor else: executor = ExecutorLoader.load_executor(dotted_path) diff --git a/scripts/ci/pre_commit/check_providers_init.py b/dev/tests_common/__init__.py old mode 100755 new mode 100644 similarity index 62% rename from scripts/ci/pre_commit/check_providers_init.py rename to dev/tests_common/__init__.py index 33def71253f34..bf2b7d4f3e2a4 --- a/scripts/ci/pre_commit/check_providers_init.py 
+++ b/dev/tests_common/__init__.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file @@ -18,14 +17,14 @@ # under the License. from __future__ import annotations -import sys +import os from pathlib import Path -AIRFLOW_SOURCES = Path(__file__).parents[3] -PROVIDERS_INIT_FILE = AIRFLOW_SOURCES / "airflow" / "providers" / "__init__.py" - -print(f"Checking if {PROVIDERS_INIT_FILE} exists.") -if PROVIDERS_INIT_FILE.exists(): - print(f"\033[0;31mERROR: {PROVIDERS_INIT_FILE} file should not exist. Deleting it.\033[0m\n") - PROVIDERS_INIT_FILE.unlink() - sys.exit(1) +# This constant is set to True if tests are run with Airflow installed from Packages rather than running +# the tests within Airflow sources. While most tests in CI are run using Airflow sources, there are +# also compatibility tests that only use `tests` package and run against installed packages of Airflow in +# for supported Airflow versions. +RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES = ( + "USE_AIRFLOW_VERSION" in os.environ + or not (Path(__file__).parents[2] / "airflow" / "__init__.py").exists() +) diff --git a/airflow/providers/airbyte/sensors/__init__.py b/dev/tests_common/_internals/__init__.py similarity index 100% rename from airflow/providers/airbyte/sensors/__init__.py rename to dev/tests_common/_internals/__init__.py diff --git a/tests/_internals/capture_warnings.py b/dev/tests_common/_internals/capture_warnings.py similarity index 100% rename from tests/_internals/capture_warnings.py rename to dev/tests_common/_internals/capture_warnings.py diff --git a/tests/_internals/forbidden_warnings.py b/dev/tests_common/_internals/forbidden_warnings.py similarity index 90% rename from tests/_internals/forbidden_warnings.py rename to dev/tests_common/_internals/forbidden_warnings.py index 324d2ff6f9824..ce24878510667 100644 --- a/tests/_internals/forbidden_warnings.py +++ b/dev/tests_common/_internals/forbidden_warnings.py @@ -17,32 +17,43 @@ from __future__ import annotations +import os +from collections.abc import Sequence from pathlib import Path import pytest import yaml -TESTS_DIR = Path(__file__).parents[1].resolve() - class ForbiddenWarningsPlugin: """Internal plugin for restricting warnings during the tests run.""" node_key: str = "forbidden_warnings_node" - deprecations_ignore: Path = (TESTS_DIR / "deprecations_ignore.yml").resolve(strict=True) + deprecations_ignore: Sequence[str | os.PathLike] def __init__(self, config: pytest.Config, forbidden_warnings: tuple[str, ...]): + # Set by a pytest_configure hook in conftest + deprecations_ignore = config.inicfg["airflow_deprecations_ignore"] + if isinstance(deprecations_ignore, (str, os.PathLike)): + self.deprecations_ignore = [deprecations_ignore] + else: + self.deprecations_ignore = deprecations_ignore + excluded_cases = { # Skip: Integration and System Tests "tests/integration/", "tests/system/", + "providers/tests/integration/", + "providers/tests/system/", # Skip: DAGs for tests "tests/dags/", "tests/dags_corrupted/", "tests/dags_with_system_exit/", } - with self.deprecations_ignore.open() as fp: - excluded_cases.update(yaml.safe_load(fp)) + for path in self.deprecations_ignore: + path = Path(path).resolve() + with path.open() as fp: + excluded_cases.update(yaml.safe_load(fp)) self.config = config self.forbidden_warnings = forbidden_warnings diff --git a/dev/tests_common/pyproject.toml b/dev/tests_common/pyproject.toml new file mode 100644 index 
0000000000000..092ad7c7c5f5b --- /dev/null +++ b/dev/tests_common/pyproject.toml @@ -0,0 +1,38 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +[build-system] +requires = ["hatchling", "hatch-vcs"] +build-backend = "hatchling.build" + +[project] +name = "airflow-dev-tests-common" +version = "0.1.0" +description = "" +classifiers = [ + "Private :: Do Not Upload", +] + +[tool.hatch.publish.index] +# Lets make doubly sure this never goes to PyPi +disable = true + +[tool.hatch.build.targets.wheel] +include = ["**/*.py"] + +[tool.hatch.build.targets.wheel.sources] +"" = "dev/tests_common" diff --git a/dev/tests_common/pytest_plugin.py b/dev/tests_common/pytest_plugin.py new file mode 100644 index 0000000000000..5694fe1b3403f --- /dev/null +++ b/dev/tests_common/pytest_plugin.py @@ -0,0 +1,1436 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
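# A minimal sketch of how a test tree is expected to pull in this shared
# plugin, assuming `dev` is importable from the repository root (the module
# path and conftest location below are illustrative assumptions, not part of
# the patch):
#
#     # conftest.py at the root of a test tree
#     pytest_plugins = "dev.tests_common.pytest_plugin"
#
# Declaring `pytest_plugins` in a root conftest is standard pytest API; it
# makes the hooks and fixtures defined in this module available to the suite.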
+ +from __future__ import annotations + +import json +import os +import platform +import re +import subprocess +import sys +from contextlib import ExitStack, suppress +from datetime import datetime, timedelta, timezone +from pathlib import Path +from typing import TYPE_CHECKING + +import pytest +import time_machine + +if TYPE_CHECKING: + from itsdangerous import URLSafeSerializer + + from dev.tests_common._internals.capture_warnings import CaptureWarningsPlugin # noqa: F401 + from dev.tests_common._internals.forbidden_warnings import ForbiddenWarningsPlugin # noqa: F401 + +# https://docs.pytest.org/en/stable/reference/reference.html#stash +capture_warnings_key = pytest.StashKey["CaptureWarningsPlugin"]() +forbidden_warnings_key = pytest.StashKey["ForbiddenWarningsPlugin"]() + +keep_env_variables = "--keep-env-variables" in sys.argv + +if not keep_env_variables: + # Clear all Environment Variables that might have side effect, + # For example, defined in /files/airflow-breeze-config/variables.env + _AIRFLOW_CONFIG_PATTERN = re.compile(r"^AIRFLOW__(.+)__(.+)$") + _KEEP_CONFIGS_SETTINGS: dict[str, dict[str, set[str]]] = { + # Keep always these configurations + "always": { + "database": {"sql_alchemy_conn"}, + "core": {"sql_alchemy_conn"}, + "celery": {"result_backend", "broker_url"}, + }, + # Keep per enabled integrations + "celery": {"celery": {"*"}, "celery_broker_transport_options": {"*"}}, + "kerberos": {"kerberos": {"*"}}, + } + if os.environ.get("RUN_TESTS_WITH_DATABASE_ISOLATION", "false").lower() == "true": + _KEEP_CONFIGS_SETTINGS["always"].update( + { + "core": { + "internal_api_url", + "fernet_key", + "database_access_isolation", + "internal_api_secret_key", + "internal_api_clock_grace", + }, + } + ) + _ENABLED_INTEGRATIONS = {e.split("_", 1)[-1].lower() for e in os.environ if e.startswith("INTEGRATION_")} + _KEEP_CONFIGS: dict[str, set[str]] = {} + for keep_settings_key in ("always", *_ENABLED_INTEGRATIONS): + if keep_settings := _KEEP_CONFIGS_SETTINGS.get(keep_settings_key): + for section, options in keep_settings.items(): + if section not in _KEEP_CONFIGS: + _KEEP_CONFIGS[section] = options + else: + _KEEP_CONFIGS[section].update(options) + for env_key in os.environ.copy(): + if m := _AIRFLOW_CONFIG_PATTERN.match(env_key): + section, option = m.group(1).lower(), m.group(2).lower() + if not (ko := _KEEP_CONFIGS.get(section)) or not ("*" in ko or option in ko): + del os.environ[env_key] + +SUPPORTED_DB_BACKENDS = ("sqlite", "postgres", "mysql") + +# A bit of a Hack - but we need to check args before they are parsed by pytest in order to +# configure the DB before Airflow gets initialized (which happens at airflow import time). 
+# Using env variables also handles the case, when python-xdist is used - python-xdist spawns separate +# processes and does not pass all args to them (it's done via env variables) so we are doing the +# same here and detect whether `--skip-db-tests` or `--run-db-tests-only` is passed to pytest +# and set env variables so the processes spawned by python-xdist can read the status from there +skip_db_tests = "--skip-db-tests" in sys.argv or os.environ.get("_AIRFLOW_SKIP_DB_TESTS") == "true" +run_db_tests_only = ( + "--run-db-tests-only" in sys.argv or os.environ.get("_AIRFLOW_RUN_DB_TESTS_ONLY") == "true" +) + +if skip_db_tests: + if run_db_tests_only: + raise Exception("You cannot specify both --skip-db-tests and --run-db-tests-only together") + # Make sure sqlalchemy will not be usable for pure unit tests even if initialized + os.environ["AIRFLOW__CORE__SQL_ALCHEMY_CONN"] = "bad_schema:///" + os.environ["AIRFLOW__DATABASE__SQL_ALCHEMY_CONN"] = "bad_schema:///" + os.environ["_IN_UNIT_TESTS"] = "true" + # Set it here to pass the flag to python-xdist spawned processes + os.environ["_AIRFLOW_SKIP_DB_TESTS"] = "true" + +if run_db_tests_only: + # Set it here to pass the flag to python-xdist spawned processes + os.environ["_AIRFLOW_RUN_DB_TESTS_ONLY"] = "true" + +_airflow_sources = os.getenv("AIRFLOW_SOURCES", None) +AIRFLOW_SOURCES_ROOT_DIR = ( + Path(_airflow_sources) if _airflow_sources else Path(__file__).parents[2] +).resolve() +AIRFLOW_TESTS_DIR = AIRFLOW_SOURCES_ROOT_DIR / "tests" + +os.environ["AIRFLOW__CORE__PLUGINS_FOLDER"] = os.fspath(AIRFLOW_TESTS_DIR / "plugins") +os.environ["AIRFLOW__CORE__DAGS_FOLDER"] = os.fspath(AIRFLOW_TESTS_DIR / "dags") +os.environ["AIRFLOW__CORE__UNIT_TEST_MODE"] = "True" +os.environ["AWS_DEFAULT_REGION"] = os.environ.get("AWS_DEFAULT_REGION") or "us-east-1" +os.environ["CREDENTIALS_DIR"] = os.environ.get("CREDENTIALS_DIR") or "/files/airflow-breeze-config/keys" +os.environ["AIRFLOW_ENABLE_AIP_44"] = os.environ.get("AIRFLOW_ENABLE_AIP_44") or "true" + +if platform.system() == "Darwin": + # mocks from unittest.mock work correctly in subprocesses only if they are created by "fork" method + # but macOS uses "spawn" by default + os.environ["AIRFLOW__CORE__MP_START_METHOD"] = "fork" + + +@pytest.fixture +def reset_db(): + """Resets Airflow db.""" + + from airflow.utils import db + + db.resetdb() + + +ALLOWED_TRACE_SQL_COLUMNS = ["num", "time", "trace", "sql", "parameters", "count"] + + +@pytest.fixture(autouse=True) +def trace_sql(request): + from dev.tests_common.test_utils.perf.perf_kit.sqlalchemy import ( # isort: skip + count_queries, + trace_queries, + ) + + """Displays queries from the tests to console.""" + trace_sql_option = request.config.option.trace_sql + if not trace_sql_option: + yield + return + + terminal_reporter = request.config.pluginmanager.getplugin("terminalreporter") + # if no terminal reporter plugin is present, nothing we can do here; + # this can happen when this function executes in a worker node + # when using pytest-xdist, for example + if terminal_reporter is None: + yield + return + + columns = [col.strip() for col in trace_sql_option.split(",")] + + def pytest_print(text): + return terminal_reporter.write_line(text) + + with ExitStack() as exit_stack: + if columns == ["num"]: + # It is very unlikely that the user wants to display only numbers, but probably + # the user just wants to count the queries. 
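# A minimal usage sketch for the branching below, assuming the `--trace-sql`
# option registered later in pytest_addoption (invocations are illustrative):
#
#     pytest --trace-sql=num        ->  count-only path via count_queries
#     pytest --trace-sql=time,sql   ->  full tracing path via trace_queries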
+ exit_stack.enter_context(count_queries(print_fn=pytest_print)) + elif any(c in columns for c in ["time", "trace", "sql", "parameters"]): + exit_stack.enter_context( + trace_queries( + display_num="num" in columns, + display_time="time" in columns, + display_trace="trace" in columns, + display_sql="sql" in columns, + display_parameters="parameters" in columns, + print_fn=pytest_print, + ) + ) + + yield + + +@pytest.fixture(autouse=True, scope="session") +def set_db_isolation_mode(): + if os.environ.get("RUN_TESTS_WITH_DATABASE_ISOLATION", "false").lower() == "true": + from airflow.api_internal.internal_api_call import InternalApiConfig + + InternalApiConfig.set_use_internal_api("tests", allow_tests_to_use_db=True) + + +def skip_if_database_isolation_mode(item): + if os.environ.get("RUN_TESTS_WITH_DATABASE_ISOLATION", "false").lower() == "true": + for _ in item.iter_markers(name="skip_if_database_isolation_mode"): + pytest.skip("This test is skipped because it is not allowed in database isolation mode.") + + +def pytest_addoption(parser: pytest.Parser): + """Add options parser for custom plugins.""" + group = parser.getgroup("airflow") + group.addoption( + "--with-db-init", + action="store_true", + dest="db_init", + help="Forces database initialization before tests", + ) + group.addoption( + "--integration", + action="append", + dest="integration", + metavar="INTEGRATIONS", + help="only run tests matching integration specified: " + "[cassandra,kerberos,mongo,celery,statsd,trino]. ", + ) + group.addoption( + "--keep-env-variables", + action="store_true", + dest="keep_env_variables", + help="do not clear environment variables that might have side effect while running tests", + ) + group.addoption( + "--skip-db-tests", + action="store_true", + dest="skip_db_tests", + help="skip tests that require database", + ) + group.addoption( + "--run-db-tests-only", + action="store_true", + dest="run_db_tests_only", + help="only run tests requiring database", + ) + group.addoption( + "--backend", + action="store", + dest="backend", + metavar="BACKEND", + help="only run tests matching the backend: [sqlite,postgres,mysql].", + ) + group.addoption( + "--system", + action="append", + dest="system", + metavar="SYSTEMS", + help="only run tests matching the system specified [google.cloud, google.marketing_platform]", + ) + group.addoption( + "--include-long-running", + action="store_true", + dest="include_long_running", + help="Includes long running tests (marked with long_running marker). They are skipped by default.", + ) + group.addoption( + "--include-quarantined", + action="store_true", + dest="include_quarantined", + help="Includes quarantined tests (marked with quarantined marker). They are skipped by default.", + ) + group.addoption( + "--exclude-virtualenv-operator", + action="store_true", + dest="exclude_virtualenv_operator", + help="Excludes virtualenv operators tests (marked with virtualenv_test marker).", + ) + group.addoption( + "--exclude-external-python-operator", + action="store_true", + dest="exclude_external_python_operator", + help="Excludes external python operator tests (marked with external_python_test marker).", + ) + allowed_trace_sql_columns_list = ",".join(ALLOWED_TRACE_SQL_COLUMNS) + group.addoption( + "--trace-sql", + action="store", + dest="trace_sql", + help=( + "Trace SQL statements. As an argument, you must specify the columns to be " + f"displayed as a comma-separated list. 
Supported values: [{allowed_trace_sql_columns_list}]"
+        ),
+        metavar="COLUMNS",
+    )
+    group.addoption(
+        "--no-db-cleanup",
+        action="store_false",
+        dest="db_cleanup",
+        help="Disable DB clear before each test module.",
+    )
+    group.addoption(
+        "--disable-forbidden-warnings",
+        action="store_true",
+        dest="disable_forbidden_warnings",
+        help="Disable raising an error if forbidden warnings detected.",
+    )
+    group.addoption(
+        "--disable-capture-warnings",
+        action="store_true",
+        dest="disable_capture_warnings",
+        help="Disable internal capture warnings.",
+    )
+    group.addoption(
+        "--warning-output-path",
+        action="store",
+        dest="warning_output_path",
+        metavar="PATH",
+        help=(
+            "Path for resulting captured warnings. Absolute or relative to the `tests` directory. "
+            "If not provided and environment variable `CAPTURE_WARNINGS_OUTPUT` is not set, "
+            "then 'warnings.txt' will be used."
+        ),
+    )
+    parser.addini(
+        name="forbidden_warnings",
+        type="linelist",
+        help="List of internal Airflow warnings which are prohibited during tests execution.",
+    )
+
+
+@pytest.fixture(autouse=True, scope="session")
+def initialize_airflow_tests(request):
+    """Helper that sets up the Airflow testing environment."""
+    print(" AIRFLOW ".center(60, "="))
+
+    from dev.tests_common.test_utils.db import initial_db_init
+
+    # Set up test environment for breeze
+    home = os.path.expanduser("~")
+    airflow_home = os.environ.get("AIRFLOW_HOME") or os.path.join(home, "airflow")
+
+    print(f"Home of the user: {home}\nAirflow home {airflow_home}")
+
+    # Initialize Airflow db if required
+    lock_file = os.path.join(airflow_home, ".airflow_db_initialised")
+    if not skip_db_tests:
+        if request.config.option.db_init:
+            from dev.tests_common.test_utils.db import initial_db_init
+
+            print("Initializing the DB - forced with --with-db-init switch.")
+            initial_db_init()
+        elif not os.path.exists(lock_file):
+            print(
+                "Initializing the DB - first time after entering the container.\n"
+                "You can force re-initialization of the database by adding --with-db-init switch to run-tests."
+            )
+            initial_db_init()
+            # Create the lock file
+            with open(lock_file, "w+"):
+                pass
+        else:
+            print(
+                "Skipping initialization of the DB as it was initialized already.\n"
+                "You can re-initialize the database by adding --with-db-init flag when running tests."
+            )
+    integration_kerberos = os.environ.get("INTEGRATION_KERBEROS")
+    if integration_kerberos == "true":
+        # Initialize kerberos
+        kerberos = os.environ.get("KRB5_KTNAME")
+        if kerberos:
+            subprocess.check_call(["kinit", "-kt", kerberos, "bob@EXAMPLE.COM"])
+        else:
+            print("Kerberos enabled! Please set up the KRB5_KTNAME environment variable")
+            sys.exit(1)
+
+
+def pytest_configure(config: pytest.Config) -> None:
+    # Ensure that the airflow sources dir is at the end of the sys path if it's not already there.
Needed to + # run import from `providers/tests/` + desired = AIRFLOW_SOURCES_ROOT_DIR.as_posix() + for path in sys.path: + if path == desired: + break + else: + sys.path.append(desired) + + if (backend := config.getoption("backend", default=None)) and backend not in SUPPORTED_DB_BACKENDS: + msg = ( + f"Provided DB backend {backend!r} not supported, " + f"expected one of: {', '.join(map(repr, SUPPORTED_DB_BACKENDS))}" + ) + pytest.exit(msg, returncode=6) + + config.addinivalue_line("markers", "integration(name): mark test to run with named integration") + config.addinivalue_line("markers", "backend(name): mark test to run with named backend") + config.addinivalue_line("markers", "system(name): mark test to run with named system") + config.addinivalue_line("markers", "platform(name): mark test to run with specific platform/environment") + config.addinivalue_line("markers", "long_running: mark test that run for a long time (many minutes)") + config.addinivalue_line( + "markers", "quarantined: mark test that are in quarantine (i.e. flaky, need to be isolated and fixed)" + ) + config.addinivalue_line( + "markers", "credential_file(name): mark tests that require credential file in CREDENTIALS_DIR" + ) + config.addinivalue_line( + "markers", "need_serialized_dag: mark tests that require dags in serialized form to be present" + ) + config.addinivalue_line( + "markers", + "db_test: mark tests that require database to be present", + ) + config.addinivalue_line( + "markers", + "non_db_test_override: you can mark individual tests with this marker to override the db_test marker", + ) + config.addinivalue_line( + "markers", + "virtualenv_operator: virtualenv operator tests are 'long', we should run them separately", + ) + config.addinivalue_line( + "markers", + "external_python_operator: external python operator tests are 'long', we should run them separately", + ) + config.addinivalue_line("markers", "enable_redact: do not mock redact secret masker") + config.addinivalue_line("markers", "skip_if_database_isolation_mode: skip if DB isolation is enabled") + + os.environ["_AIRFLOW__SKIP_DATABASE_EXECUTOR_COMPATIBILITY_CHECK"] = "1" + + # Setup internal warnings plugins + if "ignore" in sys.warnoptions: + config.option.disable_forbidden_warnings = True + config.option.disable_capture_warnings = True + if not config.pluginmanager.get_plugin("warnings"): + # Internal forbidden warnings plugin depends on builtin pytest warnings plugin + config.option.disable_forbidden_warnings = True + + forbidden_warnings: list[str] | None = config.getini("forbidden_warnings") + if not config.option.disable_forbidden_warnings and forbidden_warnings: + from dev.tests_common._internals.forbidden_warnings import ForbiddenWarningsPlugin + + forbidden_warnings_plugin = ForbiddenWarningsPlugin( + config=config, + forbidden_warnings=tuple(map(str.strip, forbidden_warnings)), + ) + config.pluginmanager.register(forbidden_warnings_plugin) + config.stash[forbidden_warnings_key] = forbidden_warnings_plugin + + if not config.option.disable_capture_warnings: + from dev.tests_common._internals.capture_warnings import CaptureWarningsPlugin + + capture_warnings_plugin = CaptureWarningsPlugin( + config=config, output_path=config.getoption("warning_output_path", default=None) + ) + config.pluginmanager.register(capture_warnings_plugin) + config.stash[capture_warnings_key] = capture_warnings_plugin + + +def pytest_unconfigure(config: pytest.Config) -> None: + os.environ.pop("_AIRFLOW__SKIP_DATABASE_EXECUTOR_COMPATIBILITY_CHECK", None) + if 
forbidden_warnings_plugin := config.stash.get(forbidden_warnings_key, None):
+        del config.stash[forbidden_warnings_key]
+        config.pluginmanager.unregister(forbidden_warnings_plugin)
+    if capture_warnings_plugin := config.stash.get(capture_warnings_key, None):
+        del config.stash[capture_warnings_key]
+        config.pluginmanager.unregister(capture_warnings_plugin)
+
+
+def skip_if_not_marked_with_integration(selected_integrations, item):
+    for marker in item.iter_markers(name="integration"):
+        integration_name = marker.args[0]
+        if integration_name in selected_integrations or "all" in selected_integrations:
+            return
+    pytest.skip(
+        f"The test is skipped because it does not have the right integration marker. "
+        f"Only tests marked with pytest.mark.integration(INTEGRATION) are run with INTEGRATION "
+        f"being one of {selected_integrations}. {item}"
+    )
+
+
+def skip_if_not_marked_with_backend(selected_backend, item):
+    for marker in item.iter_markers(name="backend"):
+        backend_names = marker.args
+        if selected_backend in backend_names:
+            return
+    pytest.skip(
+        f"The test is skipped because it does not have the right backend marker. "
+        f"Only tests marked with pytest.mark.backend('{selected_backend}') are run: {item}"
+    )
+
+
+def skip_if_platform_doesnt_match(marker):
+    allowed_platforms = ("linux", "breeze")
+    if not (args := marker.args):
+        pytest.fail(f"No platform specified, expected one of: {', '.join(map(repr, allowed_platforms))}")
+    elif not all(a in allowed_platforms for a in args):
+        pytest.fail(
+            f"Allowed platforms {', '.join(map(repr, allowed_platforms))}; "
+            f"but got: {', '.join(map(repr, args))}"
+        )
+    if "linux" in args:
+        if not sys.platform.startswith("linux"):
+            pytest.skip("Test expected to run on Linux platform.")
+    if "breeze" in args:
+        if not os.path.isfile("/.dockerenv") or os.environ.get("BREEZE", "").lower() != "true":
+            raise pytest.skip(
+                "Test expected to run inside the Airflow Breeze container. "
+                "Maybe because it is too dangerous to run it outside."
+            )
+
+
+def skip_if_not_marked_with_system(selected_systems, item):
+    for marker in item.iter_markers(name="system"):
+        systems_name = marker.args[0]
+        if systems_name in selected_systems or "all" in selected_systems:
+            return
+    pytest.skip(
+        f"The test is skipped because it does not have the right system marker. "
+        f"Only tests marked with pytest.mark.system(SYSTEM) are run with SYSTEM "
+        f"being one of {selected_systems}. {item}"
+    )
+
+
+def skip_system_test(item):
+    for marker in item.iter_markers(name="system"):
+        pytest.skip(
+            f"The test is skipped because it has system marker. System tests are only run when "
+            f"--system flag with the right system ({marker.args[0]}) is passed to pytest. {item}"
+        )
+
+
+def skip_long_running_test(item):
+    for _ in item.iter_markers(name="long_running"):
+        pytest.skip(
+            f"The test is skipped because it has long_running marker. "
+            f"And --include-long-running flag is not passed to pytest. {item}"
+        )
+
+
+def skip_quarantined_test(item):
+    for _ in item.iter_markers(name="quarantined"):
+        pytest.skip(
+            f"The test is skipped because it has quarantined marker. "
+            f"And --include-quarantined flag is not passed to pytest. {item}"
+        )
+
+
+def skip_virtualenv_operator_test(item):
+    for _ in item.iter_markers(name="virtualenv_operator"):
+        pytest.skip(
+            f"The test is skipped because it has virtualenv_operator marker. "
+            f"And --exclude-virtualenv-operator flag is not passed to pytest. 
{item}" + ) + + +def skip_external_python_operator_test(item): + for _ in item.iter_markers(name="external_python_operator"): + pytest.skip( + f"The test is skipped because it has external_python_operator marker. " + f"And --exclude-external-python-operator flag is not passed to pytest. {item}" + ) + + +def skip_db_test(item): + if next(item.iter_markers(name="db_test"), None): + if next(item.iter_markers(name="non_db_test_override"), None): + # non_db_test can override the db_test set for example on module or class level + return + else: + pytest.skip( + f"The test is skipped as it is DB test " + f"and --skip-db-tests is flag is passed to pytest. {item}" + ) + if next(item.iter_markers(name="backend"), None): + # also automatically skip tests marked with `backend` marker as they are implicitly + # db tests + pytest.skip( + f"The test is skipped as it is DB test " + f"and --skip-db-tests is flag is passed to pytest. {item}" + ) + + +def only_run_db_test(item): + if next(item.iter_markers(name="db_test"), None) and not next( + item.iter_markers(name="non_db_test_override"), None + ): + # non_db_test at individual level can override the db_test set for example on module or class level + return + else: + if next(item.iter_markers(name="backend"), None): + # Also do not skip the tests marked with `backend` marker - as it is implicitly a db test + return + pytest.skip( + f"The test is skipped as it is not a DB tests " + f"and --run-db-tests-only flag is passed to pytest. {item}" + ) + + +def skip_if_integration_disabled(marker, item): + integration_name = marker.args[0] + environment_variable_name = "INTEGRATION_" + integration_name.upper() + environment_variable_value = os.environ.get(environment_variable_name) + if not environment_variable_value or environment_variable_value != "true": + pytest.skip( + f"The test requires {integration_name} integration started and " + f"{environment_variable_name} environment variable to be set to true (it is '{environment_variable_value}')." + f" It can be set by specifying '--integration {integration_name}' at breeze startup" + f": {item}" + ) + + +def skip_if_wrong_backend(marker: pytest.Mark, item: pytest.Item) -> None: + if not (backend_names := marker.args): + reason = ( + "`pytest.mark.backend` expect to get at least one of the following backends: " + f"{', '.join(map(repr, SUPPORTED_DB_BACKENDS))}." + ) + pytest.fail(reason) + elif unsupported_backends := list(filter(lambda b: b not in SUPPORTED_DB_BACKENDS, backend_names)): + reason = ( + "Airflow Tests supports only the following backends in `pytest.mark.backend` marker: " + f"{', '.join(map(repr, SUPPORTED_DB_BACKENDS))}, " + f"but got {', '.join(map(repr, unsupported_backends))}." + ) + pytest.fail(reason) + + env_name = "BACKEND" + if not (backend := os.environ.get(env_name)) or backend not in backend_names: + reason = ( + f"The test {item.nodeid!r} requires one of {', '.join(map(repr, backend_names))} backend started " + f"and {env_name!r} environment variable to be set (currently it set to {backend!r}). " + f"It can be set by specifying backend at breeze startup." 
+ ) + pytest.skip(reason) + + +def skip_if_credential_file_missing(item): + for marker in item.iter_markers(name="credential_file"): + credential_file = marker.args[0] + credential_path = os.path.join(os.environ.get("CREDENTIALS_DIR"), credential_file) + if not os.path.exists(credential_path): + pytest.skip(f"The test requires credential file {credential_path}: {item}") + + +def pytest_runtest_setup(item): + selected_integrations_list = item.config.option.integration + selected_systems_list = item.config.option.system + + include_long_running = item.config.option.include_long_running + include_quarantined = item.config.option.include_quarantined + exclude_virtualenv_operator = item.config.option.exclude_virtualenv_operator + exclude_external_python_operator = item.config.option.exclude_external_python_operator + + for marker in item.iter_markers(name="integration"): + skip_if_integration_disabled(marker, item) + if selected_integrations_list: + skip_if_not_marked_with_integration(selected_integrations_list, item) + if selected_systems_list: + skip_if_not_marked_with_system(selected_systems_list, item) + else: + skip_system_test(item) + for marker in item.iter_markers(name="platform"): + skip_if_platform_doesnt_match(marker) + for marker in item.iter_markers(name="backend"): + skip_if_wrong_backend(marker, item) + skip_if_database_isolation_mode(item) + selected_backend = item.config.option.backend + if selected_backend: + skip_if_not_marked_with_backend(selected_backend, item) + if not include_long_running: + skip_long_running_test(item) + if not include_quarantined: + skip_quarantined_test(item) + if exclude_virtualenv_operator: + skip_virtualenv_operator_test(item) + if exclude_external_python_operator: + skip_external_python_operator_test(item) + if skip_db_tests: + skip_db_test(item) + if run_db_tests_only: + only_run_db_test(item) + skip_if_credential_file_missing(item) + + +@pytest.fixture +def frozen_sleep(monkeypatch): + """Use time-machine to "stub" sleep. + + This means the ``sleep()`` takes no time, but ``datetime.now()`` appears to move forwards. + + If your module under test does ``import time`` and then ``time.sleep``: + + .. code-block:: python + + def test_something(frozen_sleep): + my_mod.fn_under_test() + + If your module under test does ``from time import sleep`` then you will + have to mock that sleep function directly: + + .. code-block:: python + + def test_something(frozen_sleep, monkeypatch): + monkeypatch.setattr("my_mod.sleep", frozen_sleep) + my_mod.fn_under_test() + """ + traveller = None + + def fake_sleep(seconds): + nonlocal traveller + utcnow = datetime.now(tz=timezone.utc) + if traveller is not None: + traveller.stop() + traveller = time_machine.travel(utcnow + timedelta(seconds=seconds)) + traveller.start() + + monkeypatch.setattr("time.sleep", fake_sleep) + yield fake_sleep + + if traveller is not None: + traveller.stop() + + +@pytest.fixture +def dag_maker(request): + """Fixture to help create DAG, DagModel, and SerializedDAG automatically. 
+
+    You have to use the dag_maker as a context manager and it takes
+    the same arguments as DAG::
+
+        with dag_maker(dag_id="mydag") as dag:
+            task1 = EmptyOperator(task_id="mytask")
+            task2 = EmptyOperator(task_id="mytask2")
+
+    If the DagModel you want to use needs different parameters than the one
+    automatically created by the dag_maker, you have to update the DagModel as below::
+
+        dag_maker.dag_model.is_active = False
+        session.merge(dag_maker.dag_model)
+        session.commit()
+
+    For any test that uses the dag_maker, make sure to create a DagRun::
+
+        dag_maker.create_dagrun()
+
+    ``dag_maker.create_dagrun`` takes the same arguments as ``dag.create_dagrun``.
+
+    If you want to operate on serialized DAGs, then either pass
+    ``serialized=True`` to the ``dag_maker()`` call, or you can mark your
+    test/class/file with ``@pytest.mark.need_serialized_dag(True)``. In both of
+    these cases the ``dag`` returned by the context manager will be a
+    lazily-evaluated proxy object to the SerializedDAG.
+    """
+    import lazy_object_proxy
+
+    # IMPORTANT: Delay _all_ imports from `airflow.*` to _inside a method_.
+    # This fixture is "called" early on in the pytest collection process, and
+    # if we import airflow.* here the wrong (non-test) config will be loaded
+    # and "baked" into various constants
+
+    want_serialized = False
+
+    # Allow changing default serialized behaviour with `@pytest.mark.need_serialized_dag` or
+    # `@pytest.mark.need_serialized_dag(False)`
+    serialized_marker = request.node.get_closest_marker("need_serialized_dag")
+    if serialized_marker:
+        (want_serialized,) = serialized_marker.args or (True,)
+
+    from airflow.utils.log.logging_mixin import LoggingMixin
+
+    class DagFactory(LoggingMixin):
+        _own_session = False
+
+        def __init__(self):
+            from airflow.models import DagBag
+
+            # Keep all the serialized dags we've created in this test
+            self.dagbag = DagBag(os.devnull, include_examples=False, read_dags_from_db=False)
+
+        def __enter__(self):
+            self.dag.__enter__()
+            if self.want_serialized:
+                return lazy_object_proxy.Proxy(self._serialized_dag)
+            return self.dag
+
+        def _serialized_dag(self):
+            return self.serialized_model.dag
+
+        def get_serialized_data(self):
+            try:
+                data = self.serialized_model.data
+            except AttributeError:
+                raise RuntimeError("DAG serialization not requested")
+            if isinstance(data, str):
+                return json.loads(data)
+            return data
+
+        def _bag_dag_compat(self, dag):
+            # This is a compatibility shim for the old bag_dag method in Airflow <3.0
+            # TODO: Remove this when we drop support for Airflow <3.0 in Providers
+            if hasattr(dag, "parent_dag"):
+                return self.dagbag.bag_dag(dag, root_dag=dag)
+            return self.dagbag.bag_dag(dag)
+
+        def __exit__(self, type, value, traceback):
+            from airflow.models import DagModel
+            from airflow.models.serialized_dag import SerializedDagModel
+
+            dag = self.dag
+            dag.__exit__(type, value, traceback)
+            if type is not None:
+                return
+
+            dag.clear(session=self.session)
+            dag.sync_to_db(processor_subdir=self.processor_subdir, session=self.session)
+            self.dag_model = self.session.get(DagModel, dag.dag_id)
+
+            if self.want_serialized:
+                self.serialized_model = SerializedDagModel(
+                    dag, processor_subdir=self.dag_model.processor_subdir
+                )
+                self.session.merge(self.serialized_model)
+                serialized_dag = self._serialized_dag()
+                self._bag_dag_compat(serialized_dag)
+                self.session.flush()
+            else:
+                self._bag_dag_compat(self.dag)
+
+        def create_dagrun(self, **kwargs):
+            from airflow.utils import timezone
+            from airflow.utils.state import State
+            from airflow.utils.types import DagRunType
+
+            from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+
+            if AIRFLOW_V_3_0_PLUS:
+                from airflow.utils.types import DagRunTriggeredByType
+
+            dag = self.dag
+            kwargs = {
+                "state": State.RUNNING,
+                "start_date": self.start_date,
+                "session": self.session,
+                **kwargs,
+            }
+            # Need to provide run_id if the user does not provide one explicitly
+            # or pass run_type for inference in dag.create_dagrun().
+            if "run_id" not in kwargs and "run_type" not in kwargs:
+                kwargs["run_id"] = "test"
+
+            if "run_type" not in kwargs:
+                kwargs["run_type"] = DagRunType.from_run_id(kwargs["run_id"])
+            if kwargs.get("execution_date") is None:
+                if kwargs["run_type"] == DagRunType.MANUAL:
+                    kwargs["execution_date"] = self.start_date
+                else:
+                    kwargs["execution_date"] = dag.next_dagrun_info(None).logical_date
+            if "data_interval" not in kwargs:
+                logical_date = timezone.coerce_datetime(kwargs["execution_date"])
+                if kwargs["run_type"] == DagRunType.MANUAL:
+                    data_interval = dag.timetable.infer_manual_data_interval(run_after=logical_date)
+                else:
+                    data_interval = dag.infer_automated_data_interval(logical_date)
+                kwargs["data_interval"] = data_interval
+            if AIRFLOW_V_3_0_PLUS and "triggered_by" not in kwargs:
+                kwargs["triggered_by"] = DagRunTriggeredByType.TEST
+
+            self.dag_run = dag.create_dagrun(**kwargs)
+            for ti in self.dag_run.task_instances:
+                ti.refresh_from_task(dag.get_task(ti.task_id))
+            if self.want_serialized:
+                self.session.commit()
+            return self.dag_run
+
+        def create_dagrun_after(self, dagrun, **kwargs):
+            next_info = self.dag.next_dagrun_info(self.dag.get_run_data_interval(dagrun))
+            if next_info is None:
+                raise ValueError(f"cannot create run after {dagrun}")
+            return self.create_dagrun(
+                execution_date=next_info.logical_date,
+                data_interval=next_info.data_interval,
+                **kwargs,
+            )
+
+        def __call__(
+            self,
+            dag_id="test_dag",
+            schedule=timedelta(days=1),
+            serialized=want_serialized,
+            fileloc=None,
+            processor_subdir=None,
+            session=None,
+            **kwargs,
+        ):
+            from airflow import settings
+            from airflow.models.dag import DAG
+            from airflow.utils import timezone
+
+            if session is None:
+                self._own_session = True
+                session = settings.Session()
+
+            self.kwargs = kwargs
+            self.session = session
+            self.start_date = self.kwargs.get("start_date", None)
+            default_args = kwargs.get("default_args", None)
+            if default_args and not self.start_date:
+                if "start_date" in default_args:
+                    self.start_date = default_args.get("start_date")
+            if not self.start_date:
+                if hasattr(request.module, "DEFAULT_DATE"):
+                    self.start_date = getattr(request.module, "DEFAULT_DATE")
+                else:
+                    DEFAULT_DATE = timezone.datetime(2016, 1, 1)
+                    self.start_date = DEFAULT_DATE
+            self.kwargs["start_date"] = self.start_date
+            # Set schedule argument to explicitly set value, or a default if no
+            # other scheduling arguments are set.
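+            # For example (illustrative values): ``dag_maker(dag_id="d", schedule=None)``
+            # builds an unscheduled DAG, while a bare ``dag_maker()`` call falls back to
+            # the daily ``timedelta(days=1)`` default above.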
+            self.dag = DAG(dag_id, schedule=schedule, **self.kwargs)
+            self.dag.fileloc = fileloc or request.module.__file__
+            self.want_serialized = serialized
+            self.processor_subdir = processor_subdir
+
+            return self
+
+        def cleanup(self):
+            from airflow.models import DagModel, DagRun, TaskInstance, XCom
+            from airflow.models.serialized_dag import SerializedDagModel
+            from airflow.models.taskmap import TaskMap
+            from airflow.utils.retries import run_with_db_retries
+
+            from dev.tests_common.test_utils.compat import AssetEvent
+
+            for attempt in run_with_db_retries(logger=self.log):
+                with attempt:
+                    dag_ids = list(self.dagbag.dag_ids)
+                    if not dag_ids:
+                        return
+                    # To isolate problems here from problems elsewhere on the session object
+                    self.session.rollback()
+
+                    self.session.query(SerializedDagModel).filter(
+                        SerializedDagModel.dag_id.in_(dag_ids)
+                    ).delete(synchronize_session=False)
+                    self.session.query(DagRun).filter(DagRun.dag_id.in_(dag_ids)).delete(
+                        synchronize_session=False,
+                    )
+                    self.session.query(TaskInstance).filter(TaskInstance.dag_id.in_(dag_ids)).delete(
+                        synchronize_session=False,
+                    )
+                    self.session.query(XCom).filter(XCom.dag_id.in_(dag_ids)).delete(
+                        synchronize_session=False,
+                    )
+                    self.session.query(DagModel).filter(DagModel.dag_id.in_(dag_ids)).delete(
+                        synchronize_session=False,
+                    )
+                    self.session.query(TaskMap).filter(TaskMap.dag_id.in_(dag_ids)).delete(
+                        synchronize_session=False,
+                    )
+                    self.session.query(AssetEvent).filter(AssetEvent.source_dag_id.in_(dag_ids)).delete(
+                        synchronize_session=False,
+                    )
+                    self.session.commit()
+                    if self._own_session:
+                        self.session.expunge_all()
+
+    factory = DagFactory()
+
+    try:
+        yield factory
+    finally:
+        factory.cleanup()
+        with suppress(AttributeError):
+            del factory.session
+
+
+@pytest.fixture
+def create_dummy_dag(dag_maker):
+    """Create a `DAG` with a single `EmptyOperator` task.
+
+    DagRun and DagModel are also created.
+
+    Apart from the arguments already listed, any other keyword argument
+    is passed to the DAG, not to the EmptyOperator task.
+
+    If you have an argument that you want to pass to the EmptyOperator that
+    is not here, please use `default_args` so that the DAG will pass it to the
+    Task::
+
+        dag, task = create_dummy_dag(default_args={"start_date": timezone.datetime(2016, 1, 1)})
+
+    You will not be able to alter the created DagRun or DagModel; use the `dag_maker` fixture instead.
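+
+    For example (illustrative names), to create the DAG with its single task and
+    inspect both return values::
+
+        dag, task = create_dummy_dag(dag_id="my_dag", task_id="my_task")
+        assert task.task_id == "my_task"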
+ """ + from airflow.operators.empty import EmptyOperator + from airflow.utils.types import DagRunType + + def create_dag( + dag_id="dag", + task_id="op1", + task_display_name=None, + max_active_tis_per_dag=16, + max_active_tis_per_dagrun=None, + pool="default_pool", + executor_config=None, + trigger_rule="all_done", + on_success_callback=None, + on_execute_callback=None, + on_failure_callback=None, + on_retry_callback=None, + email=None, + with_dagrun_type=DagRunType.SCHEDULED, + **kwargs, + ): + op_kwargs = {} + from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS + + if AIRFLOW_V_2_9_PLUS: + op_kwargs["task_display_name"] = task_display_name + with dag_maker(dag_id, **kwargs) as dag: + op = EmptyOperator( + task_id=task_id, + max_active_tis_per_dag=max_active_tis_per_dag, + max_active_tis_per_dagrun=max_active_tis_per_dagrun, + executor_config=executor_config or {}, + on_success_callback=on_success_callback, + on_execute_callback=on_execute_callback, + on_failure_callback=on_failure_callback, + on_retry_callback=on_retry_callback, + email=email, + pool=pool, + trigger_rule=trigger_rule, + **op_kwargs, + ) + if with_dagrun_type is not None: + dag_maker.create_dagrun(run_type=with_dagrun_type) + return dag, op + + return create_dag + + +if TYPE_CHECKING: + from airflow.models.taskinstance import TaskInstance + + +@pytest.fixture +def create_task_instance(dag_maker, create_dummy_dag): + """Create a TaskInstance, and associated DB rows (DagRun, DagModel, etc). + + Uses ``create_dummy_dag`` to create the dag structure. + """ + from airflow.operators.empty import EmptyOperator + + def maker( + execution_date=None, + dagrun_state=None, + state=None, + run_id=None, + run_type=None, + data_interval=None, + external_executor_id=None, + dag_id="dag", + task_id="op1", + task_display_name=None, + max_active_tis_per_dag=16, + max_active_tis_per_dagrun=None, + pool="default_pool", + executor_config=None, + trigger_rule="all_done", + on_success_callback=None, + on_execute_callback=None, + on_failure_callback=None, + on_retry_callback=None, + email=None, + map_index=-1, + **kwargs, + ) -> TaskInstance: + from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS + + if AIRFLOW_V_3_0_PLUS: + from airflow.utils.types import DagRunTriggeredByType + + if execution_date is None: + from airflow.utils import timezone + + execution_date = timezone.utcnow() + with dag_maker(dag_id, **kwargs): + op_kwargs = {} + from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS + + if AIRFLOW_V_2_9_PLUS: + op_kwargs["task_display_name"] = task_display_name + task = EmptyOperator( + task_id=task_id, + max_active_tis_per_dag=max_active_tis_per_dag, + max_active_tis_per_dagrun=max_active_tis_per_dagrun, + executor_config=executor_config or {}, + on_success_callback=on_success_callback, + on_execute_callback=on_execute_callback, + on_failure_callback=on_failure_callback, + on_retry_callback=on_retry_callback, + email=email, + pool=pool, + trigger_rule=trigger_rule, + **op_kwargs, + ) + + dagrun_kwargs = { + "execution_date": execution_date, + "state": dagrun_state, + } + dagrun_kwargs.update({"triggered_by": DagRunTriggeredByType.TEST} if AIRFLOW_V_3_0_PLUS else {}) + if run_id is not None: + dagrun_kwargs["run_id"] = run_id + if run_type is not None: + dagrun_kwargs["run_type"] = run_type + if data_interval is not None: + dagrun_kwargs["data_interval"] = data_interval + dagrun = dag_maker.create_dagrun(**dagrun_kwargs) + (ti,) = dagrun.task_instances + ti.task = task + ti.state = state + 
ti.external_executor_id = external_executor_id + ti.map_index = map_index + + dag_maker.session.flush() + return ti + + return maker + + +@pytest.fixture +def create_serialized_task_instance_of_operator(dag_maker): + def _create_task_instance( + operator_class, + *, + dag_id, + execution_date=None, + session=None, + **operator_kwargs, + ) -> TaskInstance: + with dag_maker(dag_id=dag_id, serialized=True, session=session): + operator_class(**operator_kwargs) + if execution_date is None: + dagrun_kwargs = {} + else: + dagrun_kwargs = {"execution_date": execution_date} + (ti,) = dag_maker.create_dagrun(**dagrun_kwargs).task_instances + return ti + + return _create_task_instance + + +@pytest.fixture +def create_task_instance_of_operator(dag_maker): + def _create_task_instance( + operator_class, + *, + dag_id, + execution_date=None, + session=None, + **operator_kwargs, + ) -> TaskInstance: + with dag_maker(dag_id=dag_id, session=session, serialized=True): + operator_class(**operator_kwargs) + if execution_date is None: + dagrun_kwargs = {} + else: + dagrun_kwargs = {"execution_date": execution_date} + (ti,) = dag_maker.create_dagrun(**dagrun_kwargs).task_instances + return ti + + return _create_task_instance + + +@pytest.fixture +def create_task_of_operator(dag_maker): + def _create_task_of_operator(operator_class, *, dag_id, session=None, **operator_kwargs): + with dag_maker(dag_id=dag_id, session=session): + task = operator_class(**operator_kwargs) + return task + + return _create_task_of_operator + + +@pytest.fixture +def session(): + from airflow.utils.session import create_session + + with create_session() as session: + yield session + session.rollback() + + +@pytest.fixture +def get_test_dag(): + def _get(dag_id: str): + from airflow.models.dagbag import DagBag + from airflow.models.serialized_dag import SerializedDagModel + + dag_file = AIRFLOW_TESTS_DIR / "dags" / f"{dag_id}.py" + dagbag = DagBag(dag_folder=dag_file, include_examples=False) + + dag = dagbag.get_dag(dag_id) + dag.sync_to_db() + SerializedDagModel.write_dag(dag) + + return dag + + return _get + + +@pytest.fixture +def create_log_template(request): + from airflow import settings + from airflow.models.tasklog import LogTemplate + + session = settings.Session() + + def _create_log_template(filename_template, elasticsearch_id=""): + log_template = LogTemplate(filename=filename_template, elasticsearch_id=elasticsearch_id) + session.add(log_template) + session.commit() + + def _delete_log_template(): + from airflow.models import DagRun, TaskInstance + + session.query(TaskInstance).delete() + session.query(DagRun).delete() + session.delete(log_template) + session.commit() + + request.addfinalizer(_delete_log_template) + + return _create_log_template + + +@pytest.fixture +def reset_logging_config(): + import logging.config + + from airflow import settings + from airflow.utils.module_loading import import_string + + logging_config = import_string(settings.LOGGING_CLASS_PATH) + logging.config.dictConfig(logging_config) + + +@pytest.fixture(scope="session", autouse=True) +def suppress_info_logs_for_dag_and_fab(): + import logging + + from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS + + dag_logger = logging.getLogger("airflow.models.dag") + dag_logger.setLevel(logging.WARNING) + + if AIRFLOW_V_2_9_PLUS: + fab_logger = logging.getLogger("airflow.providers.fab.auth_manager.security_manager.override") + fab_logger.setLevel(logging.WARNING) + else: + fab_logger = logging.getLogger("airflow.www.fab_security") + 
fab_logger.setLevel(logging.WARNING)
+
+
+@pytest.fixture(scope="module", autouse=True)
+def _clear_db(request):
+    """Clear DB before each test module run."""
+    from dev.tests_common.test_utils.db import clear_all, initial_db_init
+
+    if not request.config.option.db_cleanup:
+        return
+    if skip_db_tests:
+        return
+    from airflow.configuration import conf
+
+    sql_alchemy_conn = conf.get("database", "sql_alchemy_conn")
+    if sql_alchemy_conn.startswith("sqlite"):
+        sql_alchemy_file = sql_alchemy_conn.replace("sqlite:///", "")
+        if not os.path.exists(sql_alchemy_file):
+            print(f"The sqlite file `{sql_alchemy_file}` does not exist. Attempting to initialize it.")
+            initial_db_init()
+
+    dist_option = getattr(request.config.option, "dist", "no")
+    if dist_option != "no" or hasattr(request.config, "workerinput"):
+        # Skip if pytest-xdist detected (controller or worker)
+        return
+    try:
+        clear_all()
+    except Exception as ex:
+        exc_name_parts = [type(ex).__name__]
+        exc_module = type(ex).__module__
+        if exc_module != "builtins":
+            exc_name_parts.insert(0, exc_module)
+        extra_msg = "" if request.config.option.db_init else ", try to run with flag --with-db-init"
+        pytest.exit(f"Unable to clear test DB{extra_msg}, got error {'.'.join(exc_name_parts)}: {ex}")
+
+
+@pytest.fixture(autouse=True)
+def clear_lru_cache():
+    from airflow.executors.executor_loader import ExecutorLoader
+    from airflow.utils.entry_points import _get_grouped_entry_points
+
+    ExecutorLoader.validate_database_executor_compatibility.cache_clear()
+    try:
+        _get_grouped_entry_points.cache_clear()
+        try:
+            yield
+        finally:
+            _get_grouped_entry_points.cache_clear()
+    finally:
+        ExecutorLoader.validate_database_executor_compatibility.cache_clear()
+
+
+@pytest.fixture(autouse=True)
+def refuse_to_run_test_from_wrongly_named_files(request: pytest.FixtureRequest):
+    filepath = request.node.path
+    is_system_test: bool = "tests/system/" in os.fspath(filepath)
+    test_name = request.node.name
+    if request.node.cls:
+        test_name = f"{request.node.cls.__name__}.{test_name}"
+    if is_system_test and not filepath.name.startswith(("example_", "test_")):
+        pytest.fail(
+            f"All test method files in tests/system must start with 'example_' or 'test_'. "
+            f"It seems that {os.fspath(filepath)!r} contains {test_name!r}, which looks like a test case. "
+            f"Please rename the file to follow the example_* or test_* pattern if you want to run the tests "
+            f"in it."
+        )
+    elif not is_system_test and not filepath.name.startswith("test_"):
+        pytest.fail(
+            f"All test method files in tests/ must start with 'test_'. It seems that {os.fspath(filepath)!r} "
+            f"contains {test_name!r}, which looks like a test case. Please rename the file to "
+            f"follow the test_* pattern if you want to run the tests in it."
+ ) + + +@pytest.fixture(autouse=True) +def initialize_providers_manager(): + from airflow.providers_manager import ProvidersManager + + ProvidersManager().initialize_providers_configuration() + + +@pytest.fixture(autouse=True) +def close_all_sqlalchemy_sessions(): + from sqlalchemy.orm import close_all_sessions + + with suppress(Exception): + close_all_sessions() + yield + with suppress(Exception): + close_all_sessions() + + +@pytest.fixture +def cleanup_providers_manager(): + from airflow.providers_manager import ProvidersManager + + ProvidersManager()._cleanup() + ProvidersManager().initialize_providers_configuration() + try: + yield + finally: + ProvidersManager()._cleanup() + + +@pytest.fixture(autouse=True) +def _disable_redact(request: pytest.FixtureRequest, mocker): + """Disable redacted text in tests, except specific.""" + from airflow import settings + + if next(request.node.iter_markers("enable_redact"), None): + with pytest.MonkeyPatch.context() as mp_ctx: + mp_ctx.setattr(settings, "MASK_SECRETS_IN_LOGS", True) + yield + return + + mocked_redact = mocker.patch("airflow.utils.log.secrets_masker.SecretsMasker.redact") + mocked_redact.side_effect = lambda item, name=None, max_depth=None: item + with pytest.MonkeyPatch.context() as mp_ctx: + mp_ctx.setattr(settings, "MASK_SECRETS_IN_LOGS", False) + yield + return + + +@pytest.fixture +def providers_src_folder() -> Path: + import airflow.providers + + return Path(airflow.providers.__path__[0]).parents[1] + + +@pytest.fixture +def hook_lineage_collector(): + from airflow.lineage import hook + + hook._hook_lineage_collector = None + hook._hook_lineage_collector = hook.HookLineageCollector() + yield hook.get_hook_lineage_collector() + hook._hook_lineage_collector = None + + +@pytest.fixture +def clean_dags_and_dagruns(): + """Fixture that cleans the database before and after every test.""" + from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs + + clear_db_runs() + clear_db_dags() + yield # Test runs here + clear_db_dags() + clear_db_runs() + + +@pytest.fixture(scope="session") +def app(): + from dev.tests_common.test_utils.config import conf_vars + + with conf_vars({("fab", "auth_rate_limited"): "False"}): + from airflow.www import app + + yield app.create_app(testing=True) + + +@pytest.fixture +def secret_key() -> str: + """Return secret key configured.""" + from airflow.configuration import conf + + the_key = conf.get("webserver", "SECRET_KEY") + if the_key is None: + raise RuntimeError( + "The secret key SHOULD be configured as `[webserver] secret_key` in the " + "configuration/environment at this stage! " + ) + return the_key + + +@pytest.fixture +def url_safe_serializer(secret_key) -> URLSafeSerializer: + from itsdangerous import URLSafeSerializer + + return URLSafeSerializer(secret_key) diff --git a/tests/test_utils/README.md b/dev/tests_common/test_utils/README.md similarity index 67% rename from tests/test_utils/README.md rename to dev/tests_common/test_utils/README.md index cdde5b83a59bd..2f5e694be5e3a 100644 --- a/tests/test_utils/README.md +++ b/dev/tests_common/test_utils/README.md @@ -15,6 +15,14 @@ KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ---> + --> + + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Utilities for use in tests.](#utilities-for-use-in-tests) + + # Utilities for use in tests. 
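For orientation, below is a minimal sketch of a test module that uses the helpers
relocated by this patch. The import paths are the ones introduced here
(dev.tests_common.test_utils.*); the test name and body are hypothetical:

    import pytest

    from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS
    from dev.tests_common.test_utils.db import clear_db_runs


    @pytest.mark.db_test
    @pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Test requires Airflow 2.9+")
    def test_uses_relocated_utils(dag_maker):
        # dag_maker is provided by the shared conftest shown above
        with dag_maker(dag_id="example_dag"):
            pass
        clear_db_runs()  # DB helper now lives in dev/tests_common/test_utils/db.py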
diff --git a/tests/system/providers/google/marketing_platform/__init__.py b/dev/tests_common/test_utils/__init__.py similarity index 87% rename from tests/system/providers/google/marketing_platform/__init__.py rename to dev/tests_common/test_utils/__init__.py index 217e5db960782..e440178fae6c7 100644 --- a/tests/system/providers/google/marketing_platform/__init__.py +++ b/dev/tests_common/test_utils/__init__.py @@ -15,3 +15,8 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +from __future__ import annotations + +from pathlib import Path + +AIRFLOW_MAIN_FOLDER = Path(__file__).parents[3] diff --git a/tests/test_utils/api_connexion_utils.py b/dev/tests_common/test_utils/api_connexion_utils.py similarity index 93% rename from tests/test_utils/api_connexion_utils.py rename to dev/tests_common/test_utils/api_connexion_utils.py index 48869ee48078d..2d273af2e6878 100644 --- a/tests/test_utils/api_connexion_utils.py +++ b/dev/tests_common/test_utils/api_connexion_utils.py @@ -20,7 +20,8 @@ from typing import TYPE_CHECKING from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP -from tests.test_utils.compat import ignore_provider_compatibility_error + +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.security_manager.override import EXISTING_ROLES @@ -48,11 +49,7 @@ def create_user_scope(app, username, **kwargs): It will create a user and provide it for the fixture via YIELD (generator) then will tidy up once test is complete """ - from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import ( - create_user as create_user_fab, - ) - - test_user = create_user_fab(app, username, **kwargs) + test_user = create_user(app, username, **kwargs) try: yield test_user diff --git a/tests/test_utils/asserts.py b/dev/tests_common/test_utils/asserts.py similarity index 100% rename from tests/test_utils/asserts.py rename to dev/tests_common/test_utils/asserts.py diff --git a/tests/test_utils/azure_system_helpers.py b/dev/tests_common/test_utils/azure_system_helpers.py similarity index 97% rename from tests/test_utils/azure_system_helpers.py rename to dev/tests_common/test_utils/azure_system_helpers.py index 033399235a294..b1a46617c082f 100644 --- a/tests/test_utils/azure_system_helpers.py +++ b/dev/tests_common/test_utils/azure_system_helpers.py @@ -28,8 +28,9 @@ from airflow.models import Connection from airflow.providers.microsoft.azure.hooks.fileshare import AzureFileShareHook from airflow.utils.process_utils import patch_environ -from tests.test_utils import AIRFLOW_MAIN_FOLDER -from tests.test_utils.system_tests_class import SystemTest + +from dev.tests_common.test_utils import AIRFLOW_MAIN_FOLDER +from dev.tests_common.test_utils.system_tests_class import SystemTest AZURE_DAG_FOLDER = os.path.join( AIRFLOW_MAIN_FOLDER, "airflow", "providers", "microsoft", "azure", "example_dags" diff --git a/tests/test_utils/compat.py b/dev/tests_common/test_utils/compat.py similarity index 100% rename from tests/test_utils/compat.py rename to dev/tests_common/test_utils/compat.py diff --git a/tests/test_utils/config.py b/dev/tests_common/test_utils/config.py similarity index 100% rename from tests/test_utils/config.py rename to dev/tests_common/test_utils/config.py diff --git a/tests/test_utils/db.py b/dev/tests_common/test_utils/db.py similarity index 89% rename from 
tests/test_utils/db.py rename to dev/tests_common/test_utils/db.py index a5dd94e2d009d..4b2dede05ead7 100644 --- a/tests/test_utils/db.py +++ b/dev/tests_common/test_utils/db.py @@ -17,6 +17,8 @@ # under the License. from __future__ import annotations +import os + from airflow.jobs.job import Job from airflow.models import ( Connection, @@ -42,7 +44,8 @@ from airflow.security.permissions import RESOURCE_DAG_PREFIX from airflow.utils.db import add_default_pool_if_not_exists, create_default_connections, reflect_tables from airflow.utils.session import create_session -from tests.test_utils.compat import ( + +from dev.tests_common.test_utils.compat import ( AIRFLOW_V_2_10_PLUS, AssetDagRunQueue, AssetEvent, @@ -53,6 +56,26 @@ ) +def initial_db_init(): + from flask import Flask + + from airflow.configuration import conf + from airflow.utils import db + from airflow.www.extensions.init_appbuilder import init_appbuilder + from airflow.www.extensions.init_auth_manager import get_auth_manager + + from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS + + db.resetdb() + db.bootstrap_dagbag() + # minimal app to add roles + flask_app = Flask(__name__) + flask_app.config["SQLALCHEMY_DATABASE_URI"] = conf.get("database", "SQL_ALCHEMY_CONN") + init_appbuilder(flask_app) + if AIRFLOW_V_2_8_PLUS: + get_auth_manager().init() + + def clear_db_runs(): with create_session() as session: session.query(Job).delete() @@ -83,7 +106,7 @@ def clear_db_assets(): session.query(DagScheduleAssetReference).delete() session.query(TaskOutletAssetReference).delete() if AIRFLOW_V_2_10_PLUS: - from tests.test_utils.compat import AssetAliasModel + from dev.tests_common.test_utils.compat import AssetAliasModel session.query(AssetAliasModel).delete() @@ -250,3 +273,7 @@ def clear_all(): clear_db_pools() clear_db_connections(add_default_connections_back=True) clear_dag_specific_permissions() + + +def is_db_isolation_mode(): + return os.environ.get("RUN_TESTS_WITH_DATABASE_ISOLATION", "false").lower() == "true" diff --git a/tests/test_utils/decorators.py b/dev/tests_common/test_utils/decorators.py similarity index 100% rename from tests/test_utils/decorators.py rename to dev/tests_common/test_utils/decorators.py diff --git a/tests/test_utils/fake_datetime.py b/dev/tests_common/test_utils/fake_datetime.py similarity index 100% rename from tests/test_utils/fake_datetime.py rename to dev/tests_common/test_utils/fake_datetime.py diff --git a/tests/test_utils/gcp_system_helpers.py b/dev/tests_common/test_utils/gcp_system_helpers.py similarity index 90% rename from tests/test_utils/gcp_system_helpers.py rename to dev/tests_common/test_utils/gcp_system_helpers.py index 0e681d64c49b4..e17679bd8eb56 100644 --- a/tests/test_utils/gcp_system_helpers.py +++ b/dev/tests_common/test_utils/gcp_system_helpers.py @@ -20,6 +20,7 @@ import os import tempfile from contextlib import contextmanager +from pathlib import Path from tempfile import TemporaryDirectory from typing import Sequence from unittest import mock @@ -27,27 +28,20 @@ import pytest from google.auth.environment_vars import CLOUD_SDK_CONFIG_DIR, CREDENTIALS +import airflow.providers.google from airflow.providers.google.cloud.utils.credentials_provider import provide_gcp_conn_and_credentials -from tests.providers.google.cloud.utils.gcp_authenticator import GCP_GCS_KEY, GCP_SECRET_MANAGER_KEY -from tests.test_utils import AIRFLOW_MAIN_FOLDER -from tests.test_utils.logging_command_executor import CommandExecutor -from tests.test_utils.system_tests_class import SystemTest 
-CLOUD_DAG_FOLDER = os.path.join( - AIRFLOW_MAIN_FOLDER, "airflow", "providers", "google", "cloud", "example_dags" -) -MARKETING_DAG_FOLDER = os.path.join( - AIRFLOW_MAIN_FOLDER, "airflow", "providers", "google", "marketing_platform", "example_dags" -) -GSUITE_DAG_FOLDER = os.path.join( - AIRFLOW_MAIN_FOLDER, "airflow", "providers", "google", "suite", "example_dags" -) -FIREBASE_DAG_FOLDER = os.path.join( - AIRFLOW_MAIN_FOLDER, "airflow", "providers", "google", "firebase", "example_dags" -) -LEVELDB_DAG_FOLDER = os.path.join( - AIRFLOW_MAIN_FOLDER, "airflow", "providers", "google", "leveldb", "example_dags" -) +from dev.tests_common.test_utils import AIRFLOW_MAIN_FOLDER +from dev.tests_common.test_utils.logging_command_executor import CommandExecutor +from dev.tests_common.test_utils.system_tests_class import SystemTest +from providers.tests.google.cloud.utils.gcp_authenticator import GCP_GCS_KEY, GCP_SECRET_MANAGER_KEY + +GCP_DIR = Path(airflow.providers.google.__file__).parent +CLOUD_DAG_FOLDER = GCP_DIR.joinpath("cloud", "example_dags") +MARKETING_DAG_FOLDER = GCP_DIR.joinpath("marketing_platform", "example_dags") +GSUITE_DAG_FOLDER = GCP_DIR.joinpath("suite", "example_dags") +FIREBASE_DAG_FOLDER = GCP_DIR.joinpath("firebase", "example_dags") +LEVELDB_DAG_FOLDER = GCP_DIR.joinpath("leveldb", "example_dags") POSTGRES_LOCAL_EXECUTOR = os.path.join( AIRFLOW_MAIN_FOLDER, "tests", "test_utils", "postgres_local_executor.cfg" ) diff --git a/tests/test_utils/get_all_tests.py b/dev/tests_common/test_utils/get_all_tests.py similarity index 100% rename from tests/test_utils/get_all_tests.py rename to dev/tests_common/test_utils/get_all_tests.py diff --git a/tests/test_utils/hdfs_utils.py b/dev/tests_common/test_utils/hdfs_utils.py similarity index 100% rename from tests/test_utils/hdfs_utils.py rename to dev/tests_common/test_utils/hdfs_utils.py diff --git a/tests/test_utils/logging_command_executor.py b/dev/tests_common/test_utils/logging_command_executor.py similarity index 100% rename from tests/test_utils/logging_command_executor.py rename to dev/tests_common/test_utils/logging_command_executor.py diff --git a/tests/test_utils/mapping.py b/dev/tests_common/test_utils/mapping.py similarity index 100% rename from tests/test_utils/mapping.py rename to dev/tests_common/test_utils/mapping.py diff --git a/tests/test_utils/mock_executor.py b/dev/tests_common/test_utils/mock_executor.py similarity index 100% rename from tests/test_utils/mock_executor.py rename to dev/tests_common/test_utils/mock_executor.py diff --git a/tests/test_utils/mock_operators.py b/dev/tests_common/test_utils/mock_operators.py similarity index 98% rename from tests/test_utils/mock_operators.py rename to dev/tests_common/test_utils/mock_operators.py index cd816707a59f5..0df0afec824c3 100644 --- a/tests/test_utils/mock_operators.py +++ b/dev/tests_common/test_utils/mock_operators.py @@ -23,7 +23,8 @@ from airflow.models.baseoperator import BaseOperator from airflow.models.xcom import XCom -from tests.test_utils.compat import BaseOperatorLink + +from dev.tests_common.test_utils.compat import BaseOperatorLink if TYPE_CHECKING: import jinja2 diff --git a/tests/test_utils/mock_plugins.py b/dev/tests_common/test_utils/mock_plugins.py similarity index 100% rename from tests/test_utils/mock_plugins.py rename to dev/tests_common/test_utils/mock_plugins.py diff --git a/tests/test_utils/mock_security_manager.py b/dev/tests_common/test_utils/mock_security_manager.py similarity index 92% rename from tests/test_utils/mock_security_manager.py 
rename to dev/tests_common/test_utils/mock_security_manager.py index d95d077a9882e..6b9f45e3d8410 100644 --- a/tests/test_utils/mock_security_manager.py +++ b/dev/tests_common/test_utils/mock_security_manager.py @@ -16,7 +16,7 @@ # under the License. from __future__ import annotations -from tests.test_utils.compat import ignore_provider_compatibility_error +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0", __file__): from airflow.providers.fab.auth_manager.security_manager.override import FabAirflowSecurityManagerOverride diff --git a/airflow/providers/airbyte/triggers/__init__.py b/dev/tests_common/test_utils/operators/__init__.py similarity index 100% rename from airflow/providers/airbyte/triggers/__init__.py rename to dev/tests_common/test_utils/operators/__init__.py diff --git a/tests/test_utils/operators/postgres_local_executor.cfg b/dev/tests_common/test_utils/operators/postgres_local_executor.cfg similarity index 100% rename from tests/test_utils/operators/postgres_local_executor.cfg rename to dev/tests_common/test_utils/operators/postgres_local_executor.cfg diff --git a/airflow/providers/alibaba/cloud/log/__init__.py b/dev/tests_common/test_utils/perf/__init__.py similarity index 100% rename from airflow/providers/alibaba/cloud/log/__init__.py rename to dev/tests_common/test_utils/perf/__init__.py diff --git a/tests/test_utils/perf/perf_kit/__init__.py b/dev/tests_common/test_utils/perf/perf_kit/__init__.py similarity index 100% rename from tests/test_utils/perf/perf_kit/__init__.py rename to dev/tests_common/test_utils/perf/perf_kit/__init__.py diff --git a/tests/test_utils/perf/perf_kit/memory.py b/dev/tests_common/test_utils/perf/perf_kit/memory.py similarity index 100% rename from tests/test_utils/perf/perf_kit/memory.py rename to dev/tests_common/test_utils/perf/perf_kit/memory.py diff --git a/tests/test_utils/perf/perf_kit/python.py b/dev/tests_common/test_utils/perf/perf_kit/python.py similarity index 100% rename from tests/test_utils/perf/perf_kit/python.py rename to dev/tests_common/test_utils/perf/perf_kit/python.py diff --git a/tests/test_utils/perf/perf_kit/repeat_and_time.py b/dev/tests_common/test_utils/perf/perf_kit/repeat_and_time.py similarity index 100% rename from tests/test_utils/perf/perf_kit/repeat_and_time.py rename to dev/tests_common/test_utils/perf/perf_kit/repeat_and_time.py diff --git a/tests/test_utils/perf/perf_kit/sqlalchemy.py b/dev/tests_common/test_utils/perf/perf_kit/sqlalchemy.py similarity index 100% rename from tests/test_utils/perf/perf_kit/sqlalchemy.py rename to dev/tests_common/test_utils/perf/perf_kit/sqlalchemy.py diff --git a/tests/test_utils/permissions.py b/dev/tests_common/test_utils/permissions.py similarity index 100% rename from tests/test_utils/permissions.py rename to dev/tests_common/test_utils/permissions.py diff --git a/tests/test_utils/providers.py b/dev/tests_common/test_utils/providers.py similarity index 100% rename from tests/test_utils/providers.py rename to dev/tests_common/test_utils/providers.py diff --git a/tests/test_utils/remote_user_api_auth_backend.py b/dev/tests_common/test_utils/remote_user_api_auth_backend.py similarity index 100% rename from tests/test_utils/remote_user_api_auth_backend.py rename to dev/tests_common/test_utils/remote_user_api_auth_backend.py diff --git a/tests/test_utils/reset_warning_registry.py b/dev/tests_common/test_utils/reset_warning_registry.py similarity index 100% rename from 
tests/test_utils/reset_warning_registry.py rename to dev/tests_common/test_utils/reset_warning_registry.py diff --git a/tests/test_utils/salesforce_system_helpers.py b/dev/tests_common/test_utils/salesforce_system_helpers.py similarity index 100% rename from tests/test_utils/salesforce_system_helpers.py rename to dev/tests_common/test_utils/salesforce_system_helpers.py diff --git a/tests/test_utils/sftp_system_helpers.py b/dev/tests_common/test_utils/sftp_system_helpers.py similarity index 100% rename from tests/test_utils/sftp_system_helpers.py rename to dev/tests_common/test_utils/sftp_system_helpers.py diff --git a/tests/system/utils/__init__.py b/dev/tests_common/test_utils/system_tests.py similarity index 100% rename from tests/system/utils/__init__.py rename to dev/tests_common/test_utils/system_tests.py diff --git a/tests/test_utils/system_tests_class.py b/dev/tests_common/test_utils/system_tests_class.py similarity index 97% rename from tests/test_utils/system_tests_class.py rename to dev/tests_common/test_utils/system_tests_class.py index cfd72174e1778..836782b8584c9 100644 --- a/tests/test_utils/system_tests_class.py +++ b/dev/tests_common/test_utils/system_tests_class.py @@ -29,8 +29,9 @@ from airflow.configuration import AIRFLOW_HOME, AirflowConfigParser, get_airflow_config from airflow.exceptions import AirflowException from airflow.models.dagbag import DagBag -from tests.test_utils import AIRFLOW_MAIN_FOLDER -from tests.test_utils.logging_command_executor import get_executor + +from dev.tests_common.test_utils import AIRFLOW_MAIN_FOLDER +from dev.tests_common.test_utils.logging_command_executor import get_executor DEFAULT_DAG_FOLDER = os.path.join(AIRFLOW_MAIN_FOLDER, "airflow", "example_dags") diff --git a/tests/test_utils/terraform.py b/dev/tests_common/test_utils/terraform.py similarity index 95% rename from tests/test_utils/terraform.py rename to dev/tests_common/test_utils/terraform.py index bbb68b60c1a8b..b600ef6643b19 100644 --- a/tests/test_utils/terraform.py +++ b/dev/tests_common/test_utils/terraform.py @@ -16,7 +16,7 @@ # under the License. from __future__ import annotations -from tests.test_utils.system_tests_class import SystemTest +from dev.tests_common.test_utils.system_tests_class import SystemTest class Terraform(SystemTest): diff --git a/tests/test_utils/timetables.py b/dev/tests_common/test_utils/timetables.py similarity index 100% rename from tests/test_utils/timetables.py rename to dev/tests_common/test_utils/timetables.py diff --git a/tests/system/utils/watcher.py b/dev/tests_common/test_utils/watcher.py similarity index 100% rename from tests/system/utils/watcher.py rename to dev/tests_common/test_utils/watcher.py diff --git a/tests/test_utils/www.py b/dev/tests_common/test_utils/www.py similarity index 100% rename from tests/test_utils/www.py rename to dev/tests_common/test_utils/www.py diff --git a/docs/apache-airflow-providers-airbyte/changelog.rst b/docs/apache-airflow-providers-airbyte/changelog.rst index 2e7bb99a7f6b5..6ac2033101e62 100644 --- a/docs/apache-airflow-providers-airbyte/changelog.rst +++ b/docs/apache-airflow-providers-airbyte/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/airbyte/CHANGELOG.rst +.. 
include:: ../../providers/src/airflow/providers/airbyte/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-airbyte/index.rst b/docs/apache-airflow-providers-airbyte/index.rst index a967e36ff4faa..c4aff1b11543f 100644 --- a/docs/apache-airflow-providers-airbyte/index.rst +++ b/docs/apache-airflow-providers-airbyte/index.rst @@ -48,14 +48,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/airbyte/index> + System Tests <_api/tests/system/airbyte/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-airbyte/operators/airbyte.rst b/docs/apache-airflow-providers-airbyte/operators/airbyte.rst index d07301dc42b84..45632be1e3d9e 100644 --- a/docs/apache-airflow-providers-airbyte/operators/airbyte.rst +++ b/docs/apache-airflow-providers-airbyte/operators/airbyte.rst @@ -47,14 +47,14 @@ This Operator will initiate the Airbyte job, and the Operator manages the job st An example using the synchronous way: -.. exampleinclude:: /../../tests/system/providers/airbyte/example_airbyte_trigger_job.py +.. exampleinclude:: /../../providers/tests/system/airbyte/example_airbyte_trigger_job.py :language: python :start-after: [START howto_operator_airbyte_synchronous] :end-before: [END howto_operator_airbyte_synchronous] An example using the async way: -.. exampleinclude:: /../../tests/system/providers/airbyte/example_airbyte_trigger_job.py +.. exampleinclude:: /../../providers/tests/system/airbyte/example_airbyte_trigger_job.py :language: python :start-after: [START howto_operator_airbyte_asynchronous] :end-before: [END howto_operator_airbyte_asynchronous] diff --git a/docs/apache-airflow-providers-alibaba/changelog.rst b/docs/apache-airflow-providers-alibaba/changelog.rst index 5cb1e797e2ae3..f4c779dbf5011 100644 --- a/docs/apache-airflow-providers-alibaba/changelog.rst +++ b/docs/apache-airflow-providers-alibaba/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/alibaba/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/alibaba/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-alibaba/index.rst b/docs/apache-airflow-providers-alibaba/index.rst index ab2b244861b16..4529cd7c02bb3 100644 --- a/docs/apache-airflow-providers-alibaba/index.rst +++ b/docs/apache-airflow-providers-alibaba/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/alibaba/index> + System Tests <_api/tests/system/alibaba/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-alibaba/operators/analyticdb_spark.rst b/docs/apache-airflow-providers-alibaba/operators/analyticdb_spark.rst index ac3f0638ad24c..952eb6c2c6037 100644 --- a/docs/apache-airflow-providers-alibaba/operators/analyticdb_spark.rst +++ b/docs/apache-airflow-providers-alibaba/operators/analyticdb_spark.rst @@ -39,7 +39,7 @@ Defining tasks In the following code we submit Spark Pi and Spark Logistic regression applications. -.. exampleinclude:: /../../tests/system/providers/alibaba/example_adb_spark_batch.py +.. 
exampleinclude:: /../../providers/tests/system/alibaba/example_adb_spark_batch.py :language: python :start-after: [START howto_operator_adb_spark_batch] :end-before: [END howto_operator_adb_spark_batch] diff --git a/docs/apache-airflow-providers-alibaba/operators/oss.rst b/docs/apache-airflow-providers-alibaba/operators/oss.rst index d09949be0be06..17d3829e37427 100644 --- a/docs/apache-airflow-providers-alibaba/operators/oss.rst +++ b/docs/apache-airflow-providers-alibaba/operators/oss.rst @@ -45,7 +45,7 @@ Defining tasks In the following code we create a new bucket and then delete the bucket. -.. exampleinclude:: /../../tests/system/providers/alibaba/example_oss_bucket.py +.. exampleinclude:: /../../providers/tests/system/alibaba/example_oss_bucket.py :language: python :start-after: [START howto_operator_oss_bucket] :end-before: [END howto_operator_oss_bucket] diff --git a/docs/apache-airflow-providers-amazon/changelog.rst b/docs/apache-airflow-providers-amazon/changelog.rst index abd12152cfaeb..8138c8db39b24 100644 --- a/docs/apache-airflow-providers-amazon/changelog.rst +++ b/docs/apache-airflow-providers-amazon/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/amazon/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/amazon/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-amazon/example-dags.rst b/docs/apache-airflow-providers-amazon/example-dags.rst index c01939ad25afd..2762597cb4ba3 100644 --- a/docs/apache-airflow-providers-amazon/example-dags.rst +++ b/docs/apache-airflow-providers-amazon/example-dags.rst @@ -20,4 +20,4 @@ Example DAGs You can learn how to use Amazon AWS integrations by analyzing the source code of the example DAGs: -* `Amazon AWS `__ +* `Amazon AWS `__ diff --git a/docs/apache-airflow-providers-amazon/index.rst b/docs/apache-airflow-providers-amazon/index.rst index 6d354b1a5f894..870bbe5626329 100644 --- a/docs/apache-airflow-providers-amazon/index.rst +++ b/docs/apache-airflow-providers-amazon/index.rst @@ -57,7 +57,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/amazon/index> + System Tests <_api/tests/system/amazon/index> System Tests Dashboard .. toctree:: diff --git a/docs/apache-airflow-providers-amazon/operators/appflow.rst b/docs/apache-airflow-providers-amazon/operators/appflow.rst index c28cfb001b5a0..d54d79df75cbb 100644 --- a/docs/apache-airflow-providers-amazon/operators/appflow.rst +++ b/docs/apache-airflow-providers-amazon/operators/appflow.rst @@ -49,7 +49,7 @@ Run Flow To run an AppFlow flow keeping as is, use: :class:`~airflow.providers.amazon.aws.operators.appflow.AppflowRunOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_appflow_run.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_appflow_run.py :language: python :dedent: 4 :start-after: [START howto_operator_appflow_run] @@ -66,7 +66,7 @@ Run Flow Full To run an AppFlow flow removing all filters, use: :class:`~airflow.providers.amazon.aws.operators.appflow.AppflowRunFullOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_appflow.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_appflow.py :language: python :dedent: 4 :start-after: [START howto_operator_appflow_run_full] @@ -83,7 +83,7 @@ Run Flow Daily To run an AppFlow flow filtering daily records, use: :class:`~airflow.providers.amazon.aws.operators.appflow.AppflowRunDailyOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_appflow.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_appflow.py :language: python :dedent: 4 :start-after: [START howto_operator_appflow_run_daily] @@ -100,7 +100,7 @@ Run Flow Before To run an AppFlow flow filtering future records and selecting the past ones, use: :class:`~airflow.providers.amazon.aws.operators.appflow.AppflowRunBeforeOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_appflow.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_appflow.py :language: python :dedent: 4 :start-after: [START howto_operator_appflow_run_before] @@ -117,7 +117,7 @@ Run Flow After To run an AppFlow flow filtering past records and selecting the future ones, use: :class:`~airflow.providers.amazon.aws.operators.appflow.AppflowRunAfterOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_appflow.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_appflow.py :language: python :dedent: 4 :start-after: [START howto_operator_appflow_run_after] @@ -134,7 +134,7 @@ Skipping Tasks For Empty Runs To skip tasks when some AppFlow run return zero records, use: :class:`~airflow.providers.amazon.aws.operators.appflow.AppflowRecordsShortCircuitOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_appflow.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_appflow.py :language: python :dedent: 4 :start-after: [START howto_operator_appflow_shortcircuit] diff --git a/docs/apache-airflow-providers-amazon/operators/athena/athena_boto.rst b/docs/apache-airflow-providers-amazon/operators/athena/athena_boto.rst index e789290a6ba0f..295da63f9170a 100644 --- a/docs/apache-airflow-providers-amazon/operators/athena/athena_boto.rst +++ b/docs/apache-airflow-providers-amazon/operators/athena/athena_boto.rst @@ -48,9 +48,9 @@ to run a query in Amazon Athena. In the following example, we query an existing Athena table and send the results to an existing Amazon S3 bucket. For more examples of how to use this operator, please -see the `Sample DAG `__. +see the `Sample DAG `__. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_athena.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_athena.py :language: python :start-after: [START howto_operator_athena] :dedent: 4 @@ -67,7 +67,7 @@ Wait on Amazon Athena query results Use the :class:`~airflow.providers.amazon.aws.sensors.athena.AthenaSensor` to wait for the results of a query in Amazon Athena. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_athena.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_athena.py :language: python :start-after: [START howto_sensor_athena] :dedent: 4 diff --git a/docs/apache-airflow-providers-amazon/operators/athena/athena_sql.rst b/docs/apache-airflow-providers-amazon/operators/athena/athena_sql.rst index c29f89063fcdd..9f92e17e010e9 100644 --- a/docs/apache-airflow-providers-amazon/operators/athena/athena_sql.rst +++ b/docs/apache-airflow-providers-amazon/operators/athena/athena_sql.rst @@ -41,7 +41,7 @@ The generic ``SQLExecuteQueryOperator`` can be used to execute SQL queries again To execute a single SQL query against an Amazon Athena without bringing back the results to Airflow, please use ``AthenaOperator`` instead. -.. exampleinclude:: /../../tests/system/providers/common/sql/example_sql_execute_query.py +.. exampleinclude:: /../../providers/tests/system/common/sql/example_sql_execute_query.py :language: python :dedent: 4 :start-after: [START howto_operator_sql_execute_query] @@ -51,7 +51,7 @@ Also, if you need to do simple data quality tests with Amazon Athena, you can us The below example demonstrates how to instantiate the SQLTableCheckOperator task. -.. exampleinclude:: /../../tests/system/providers/common/sql/example_sql_column_table_check.py +.. exampleinclude:: /../../providers/tests/system/common/sql/example_sql_column_table_check.py :language: python :dedent: 4 :start-after: [START howto_operator_sql_table_check] diff --git a/docs/apache-airflow-providers-amazon/operators/batch.rst b/docs/apache-airflow-providers-amazon/operators/batch.rst index 4cc2a2b0cced4..efb71fafe8043 100644 --- a/docs/apache-airflow-providers-amazon/operators/batch.rst +++ b/docs/apache-airflow-providers-amazon/operators/batch.rst @@ -40,7 +40,7 @@ Submit a new AWS Batch job To submit a new AWS Batch job and monitor it until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.operators.batch.BatchOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_batch.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_batch.py :language: python :dedent: 4 :start-after: [START howto_operator_batch] @@ -54,7 +54,7 @@ Create an AWS Batch compute environment To create a new AWS Batch compute environment you can use :class:`~airflow.providers.amazon.aws.operators.batch.BatchCreateComputeEnvironmentOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_batch.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_batch.py :language: python :dedent: 4 :start-after: [START howto_operator_batch_create_compute_environment] @@ -71,7 +71,7 @@ Wait on an AWS Batch job state To wait on the state of an AWS Batch Job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.batch.BatchSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_batch.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_batch.py :language: python :dedent: 4 :start-after: [START howto_sensor_batch] @@ -94,7 +94,7 @@ Wait on an AWS Batch compute environment status To wait on the status of an AWS Batch compute environment until it reaches a terminal status you can use :class:`~airflow.providers.amazon.aws.sensors.batch.BatchComputeEnvironmentSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_batch.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_batch.py :language: python :dedent: 4 :start-after: [START howto_sensor_batch_compute_environment] @@ -108,7 +108,7 @@ Wait on an AWS Batch job queue status To wait on the status of an AWS Batch job queue until it reaches a terminal status you can use :class:`~airflow.providers.amazon.aws.sensors.batch.BatchJobQueueSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_batch.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_batch.py :language: python :dedent: 4 :start-after: [START howto_sensor_batch_job_queue] diff --git a/docs/apache-airflow-providers-amazon/operators/bedrock.rst b/docs/apache-airflow-providers-amazon/operators/bedrock.rst index daf9301565171..6a2af21f2ff91 100644 --- a/docs/apache-airflow-providers-amazon/operators/bedrock.rst +++ b/docs/apache-airflow-providers-amazon/operators/bedrock.rst @@ -52,7 +52,7 @@ for details on the different formats, see For example, to invoke a Meta Llama model you would use: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock.py :language: python :dedent: 4 :start-after: [START howto_operator_invoke_llama_model] @@ -60,7 +60,7 @@ For example, to invoke a Meta Llama model you would use: To invoke an Amazon Titan model you would use: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock.py :language: python :dedent: 4 :start-after: [START howto_operator_invoke_titan_model] @@ -68,7 +68,7 @@ To invoke an Amazon Titan model you would use: To invoke a Claude V2 model using the Completions API you would use: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_invoke_claude_model] @@ -90,7 +90,7 @@ and the training/validation data size. To monitor the state of the job, you can or the :class:`~airflow.providers.amazon.aws.triggers.BedrockCustomizeModelCompletedTrigger` Trigger. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock.py :language: python :dedent: 4 :start-after: [START howto_operator_customize_model] @@ -112,7 +112,7 @@ or the :class:`~airflow.providers.amazon.aws.triggers.BedrockProvisionModelThrou Trigger. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock.py :language: python :dedent: 4 :start-after: [START howto_operator_provision_throughput] @@ -129,7 +129,7 @@ To create an Amazon Bedrock Knowledge Base, you can use For more information on which models support embedding data into a vector store, see https://docs.aws.amazon.com/bedrock/latest/userguide/knowledge-base-supported.html -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_bedrock_create_knowledge_base] @@ -142,7 +142,7 @@ Delete an Amazon Bedrock Knowledge Base Deleting a Knowledge Base is a simple boto API call and can be done in a TaskFlow task like the example below. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :start-after: [START howto_operator_bedrock_delete_knowledge_base] :end-before: [END howto_operator_bedrock_delete_knowledge_base] @@ -155,7 +155,7 @@ Create an Amazon Bedrock Data Source To create an Amazon Bedrock Data Source, you can use :class:`~airflow.providers.amazon.aws.operators.bedrock.BedrockCreateDataSourceOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_bedrock_create_data_source] @@ -168,7 +168,7 @@ Delete an Amazon Bedrock Data Source Deleting a Data Source is a simple boto API call and can be done in a TaskFlow task like the example below. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :start-after: [START howto_operator_bedrock_delete_data_source] :end-before: [END howto_operator_bedrock_delete_data_source] @@ -181,7 +181,7 @@ Ingest data into an Amazon Bedrock Data Source To add data from an Amazon S3 bucket into an Amazon Bedrock Data Source, you can use :class:`~airflow.providers.amazon.aws.operators.bedrock.BedrockIngestDataOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_bedrock_ingest_data] @@ -201,7 +201,7 @@ would like to pass the results through an LLM in order to generate a text respon For more information on which models support retrieving information from a knowledge base, see https://docs.aws.amazon.com/bedrock/latest/userguide/knowledge-base-supported.html -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_bedrock_retrieve] @@ -223,7 +223,7 @@ NOTE: Support for "external sources" was added in boto 1.34.90 Example using an Amazon Bedrock Knowledge Base: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_bedrock_knowledge_base_rag] @@ -231,7 +231,7 @@ Example using an Amazon Bedrock Knowledge Base: Example using a PDF file in an Amazon S3 Bucket: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. 
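A minimal, hypothetical sketch of ``BedrockIngestDataOperator`` is shown below; both ids are placeholders, since the real values come from the create-knowledge-base and create-data-source steps above.

.. code-block:: python

    from airflow.providers.amazon.aws.operators.bedrock import BedrockIngestDataOperator

    # Placeholder ids; in practice these are XComs from the upstream create tasks.
    ingest_data = BedrockIngestDataOperator(
        task_id="ingest_data",
        knowledge_base_id="EXAMPLEKB01",
        data_source_id="EXAMPLEDS01",
    )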
exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_operator_bedrock_external_sources_rag] @@ -249,7 +249,7 @@ Wait for an Amazon Bedrock customize model job To wait on the state of an Amazon Bedrock customize model job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.bedrock.BedrockCustomizeModelCompletedSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock.py :language: python :dedent: 4 :start-after: [START howto_sensor_customize_model] @@ -264,7 +264,7 @@ To wait on the state of an Amazon Bedrock provision model throughput job until i terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.bedrock.BedrockProvisionModelThroughputCompletedSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock.py :language: python :dedent: 4 :start-after: [START howto_sensor_provision_throughput] @@ -278,7 +278,7 @@ Wait for an Amazon Bedrock Knowledge Base To wait on the state of an Amazon Bedrock Knowledge Base until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.bedrock.BedrockKnowledgeBaseActiveSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_sensor_bedrock_knowledge_base_active] @@ -292,7 +292,7 @@ Wait for an Amazon Bedrock ingestion job to finish To wait on the state of an Amazon Bedrock data ingestion job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.bedrock.BedrockIngestionJobSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_sensor_bedrock_ingest_data] diff --git a/docs/apache-airflow-providers-amazon/operators/cloudformation.rst b/docs/apache-airflow-providers-amazon/operators/cloudformation.rst index ff45efcdb645e..40ab46bcf732e 100644 --- a/docs/apache-airflow-providers-amazon/operators/cloudformation.rst +++ b/docs/apache-airflow-providers-amazon/operators/cloudformation.rst @@ -47,7 +47,7 @@ Create an AWS CloudFormation stack To create a new AWS CloudFormation stack use :class:`~airflow.providers.amazon.aws.operators.cloud_formation.CloudFormationCreateStackOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_cloudformation.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_cloudformation.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudformation_create_stack] @@ -61,7 +61,7 @@ Delete an AWS CloudFormation stack To delete an AWS CloudFormation stack you can use :class:`~airflow.providers.amazon.aws.operators.cloud_formation.CloudFormationDeleteStackOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_cloudformation.py +.. 
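To make the CloudFormation discussion concrete, here is a minimal sketch of ``CloudFormationCreateStackOperator``; the stack name and the tiny template are placeholders, not taken from this patch.

.. code-block:: python

    import json

    from airflow.providers.amazon.aws.operators.cloud_formation import (
        CloudFormationCreateStackOperator,
    )

    # Placeholder stack name and template: a single SQS queue, deleted on failure.
    create_stack = CloudFormationCreateStackOperator(
        task_id="create_stack",
        stack_name="example-stack",
        cloudformation_parameters={
            "TemplateBody": json.dumps(
                {"Resources": {"ExampleQueue": {"Type": "AWS::SQS::Queue"}}}
            ),
            "TimeoutInMinutes": 5,
            "OnFailure": "DELETE",
        },
    )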
exampleinclude:: /../../providers/tests/system/amazon/aws/example_cloudformation.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudformation_delete_stack] @@ -78,7 +78,7 @@ Wait on an AWS CloudFormation stack creation state To wait on the state of an AWS CloudFormation stack creation until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.cloud_formation.CloudFormationCreateStackSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_cloudformation.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_cloudformation.py :language: python :dedent: 4 :start-after: [START howto_sensor_cloudformation_create_stack] @@ -92,7 +92,7 @@ Wait on an AWS CloudFormation stack deletion state To wait on the state of an AWS CloudFormation stack deletion until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.cloud_formation.CloudFormationDeleteStackSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_cloudformation.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_cloudformation.py :language: python :dedent: 4 :start-after: [START howto_sensor_cloudformation_delete_stack] diff --git a/docs/apache-airflow-providers-amazon/operators/comprehend.rst b/docs/apache-airflow-providers-amazon/operators/comprehend.rst index af6ed023fe05e..c273643651bbd 100644 --- a/docs/apache-airflow-providers-amazon/operators/comprehend.rst +++ b/docs/apache-airflow-providers-amazon/operators/comprehend.rst @@ -44,7 +44,7 @@ Create an Amazon Comprehend Start PII Entities Detection Job To create an Amazon Comprehend Start PII Entities Detection Job, you can use :class:`~airflow.providers.amazon.aws.operators.comprehend.ComprehendStartPiiEntitiesDetectionJobOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_comprehend.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_comprehend.py :language: python :dedent: 4 :start-after: [START howto_operator_start_pii_entities_detection_job] @@ -58,7 +58,7 @@ Create an Amazon Comprehend Document Classifier To create an Amazon Comprehend Document Classifier, you can use :class:`~airflow.providers.amazon.aws.operators.comprehend.ComprehendCreateDocumentClassifierOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_comprehend_document_classifier.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_comprehend_document_classifier.py :language: python :dedent: 4 :start-after: [START howto_operator_create_document_classifier] @@ -76,7 +76,7 @@ To wait on the state of an Amazon Comprehend Start PII Entities Detection Job un state you can use :class:`~airflow.providers.amazon.aws.sensors.comprehend.ComprehendStartPiiEntitiesDetectionJobCompletedSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_comprehend.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_comprehend.py :language: python :dedent: 4 :start-after: [START howto_sensor_start_pii_entities_detection_job] @@ -91,7 +91,7 @@ To wait on the state of an Amazon Comprehend Document Classifier until it reache state you can use :class:`~airflow.providers.amazon.aws.sensors.comprehend.ComprehendCreateDocumentClassifierCompletedSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_comprehend_document_classifier.py +.. 
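The sketch below illustrates the Comprehend PII detection job discussed above; it is a hypothetical example, with the S3 URIs and IAM role ARN as placeholders, and the config keys mirror the boto3 ``start_pii_entities_detection_job`` request.

.. code-block:: python

    from airflow.providers.amazon.aws.operators.comprehend import (
        ComprehendStartPiiEntitiesDetectionJobOperator,
    )

    # Bucket paths and role ARN are placeholders for illustration only.
    start_pii_entities_detection_job = ComprehendStartPiiEntitiesDetectionJobOperator(
        task_id="start_pii_entities_detection_job",
        input_data_config={
            "S3Uri": "s3://example-bucket/input/",
            "InputFormat": "ONE_DOC_PER_LINE",
        },
        output_data_config={"S3Uri": "s3://example-bucket/output/"},
        mode="ONLY_REDACTION",
        data_access_role_arn="arn:aws:iam::123456789012:role/ComprehendExampleRole",
        language_code="en",
    )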
exampleinclude:: /../../providers/tests/system/amazon/aws/example_comprehend_document_classifier.py :language: python :dedent: 4 :start-after: [START howto_sensor_create_document_classifier] diff --git a/docs/apache-airflow-providers-amazon/operators/datasync.rst b/docs/apache-airflow-providers-amazon/operators/datasync.rst index 16e65db42dbdc..26db89022b5de 100644 --- a/docs/apache-airflow-providers-amazon/operators/datasync.rst +++ b/docs/apache-airflow-providers-amazon/operators/datasync.rst @@ -64,7 +64,7 @@ Execute a task To execute a specific task, you can pass the ``task_arn`` to the operator. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_datasync.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_datasync.py :language: python :dedent: 4 :start-after: [START howto_operator_datasync_specific_task] @@ -78,7 +78,7 @@ If one task is found, this one will be executed. If more than one task is found, the operator will raise an Exception. To avoid this, you can set ``allow_random_task_choice`` to ``True`` to randomly choose from candidate tasks. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_datasync.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_datasync.py :language: python :dedent: 4 :start-after: [START howto_operator_datasync_search_task] @@ -97,7 +97,7 @@ existing Task was found. If these are left to their default value (None) then no Also, because ``delete_task_after_execution`` is set to ``True``, the task will be deleted from AWS DataSync after it completes successfully. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_datasync.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_datasync.py :language: python :dedent: 4 :start-after: [START howto_operator_datasync_create_task] diff --git a/docs/apache-airflow-providers-amazon/operators/dms.rst b/docs/apache-airflow-providers-amazon/operators/dms.rst index 2c30e3ca6ec88..b80fb9e2b3ac4 100644 --- a/docs/apache-airflow-providers-amazon/operators/dms.rst +++ b/docs/apache-airflow-providers-amazon/operators/dms.rst @@ -52,7 +52,7 @@ Create a replication task To create a replication task you can use :class:`~airflow.providers.amazon.aws.operators.dms.DmsCreateTaskOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dms.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_create_task] @@ -66,7 +66,7 @@ Start a replication task To start a replication task you can use :class:`~airflow.providers.amazon.aws.operators.dms.DmsStartTaskOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dms.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_start_task] @@ -80,7 +80,7 @@ Get details of replication tasks To retrieve the details for a list of replication tasks you can use :class:`~airflow.providers.amazon.aws.operators.dms.DmsDescribeTasksOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dms.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_describe_tasks] @@ -94,7 +94,7 @@ Stop a replication task To stop a replication task you can use :class:`~airflow.providers.amazon.aws.operators.dms.DmsStopTaskOperator`. -.. 
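For the "execute a specific task" case described above, a minimal sketch of ``DataSyncOperator`` with an explicit ``task_arn`` follows; the ARN is a placeholder, not a value from this patch.

.. code-block:: python

    from airflow.providers.amazon.aws.operators.datasync import DataSyncOperator

    # Placeholder ARN; pass the ARN of an existing DataSync task.
    execute_specific_task = DataSyncOperator(
        task_id="datasync_specific_task",
        task_arn="arn:aws:datasync:us-east-1:123456789012:task/task-0123456789abcdef0",
    )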
exampleinclude:: /../../tests/system/providers/amazon/aws/example_dms.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_stop_task] @@ -108,7 +108,7 @@ Delete a replication task To delete a replication task you can use :class:`~airflow.providers.amazon.aws.operators.dms.DmsDeleteTaskOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dms.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms.py :language: python :dedent: 4 :start-after: [START howto_operator_dms_delete_task] @@ -125,7 +125,7 @@ Wait for a replication task to complete To check the state of a replication task until it is completed, you can use :class:`~airflow.providers.amazon.aws.sensors.dms.DmsTaskCompletedSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dms.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dms.py :language: python :dedent: 4 :start-after: [START howto_sensor_dms_task_completed] diff --git a/docs/apache-airflow-providers-amazon/operators/dynamodb.rst b/docs/apache-airflow-providers-amazon/operators/dynamodb.rst index d899f0de9223e..aab6626109d0e 100644 --- a/docs/apache-airflow-providers-amazon/operators/dynamodb.rst +++ b/docs/apache-airflow-providers-amazon/operators/dynamodb.rst @@ -51,7 +51,7 @@ Wait for a Single Attribute Value Match: This example shows how to use ``DynamoDBValueSensor`` to wait for a specific attribute/value pair in a DynamoDB item. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dynamodb.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dynamodb.py :language: python :start-after: [START howto_sensor_dynamodb_value] :dedent: 4 @@ -62,7 +62,7 @@ Wait for Any Value from a List of Attribute Values: In this example, the sensor waits for the DynamoDB item to have an attribute that matches any value from a provided list. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dynamodb.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dynamodb.py :language: python :start-after: [START howto_sensor_dynamodb_any_value] :dedent: 4 diff --git a/docs/apache-airflow-providers-amazon/operators/ec2.rst b/docs/apache-airflow-providers-amazon/operators/ec2.rst index e5462b32a18f4..4cfeb17d24651 100644 --- a/docs/apache-airflow-providers-amazon/operators/ec2.rst +++ b/docs/apache-airflow-providers-amazon/operators/ec2.rst @@ -38,7 +38,7 @@ Start an Amazon EC2 instance To start an Amazon EC2 instance you can use :class:`~airflow.providers.amazon.aws.operators.ec2.EC2StartInstanceOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ec2.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_operator_ec2_start_instance] @@ -52,7 +52,7 @@ Stop an Amazon EC2 instance To stop an Amazon EC2 instance you can use :class:`~airflow.providers.amazon.aws.operators.ec2.EC2StopInstanceOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ec2.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_operator_ec2_stop_instance] @@ -66,7 +66,7 @@ Create and start an Amazon EC2 instance To create and start an Amazon EC2 instance you can use :class:`~airflow.providers.amazon.aws.operators.ec2.EC2CreateInstanceOperator`. -.. 
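As a small illustration of the EC2 operators just introduced, here is a hedged sketch; the instance id is a placeholder and the chaining simply shows the two tasks composed in a DAG.

.. code-block:: python

    from airflow.providers.amazon.aws.operators.ec2 import (
        EC2StartInstanceOperator,
        EC2StopInstanceOperator,
    )

    # Placeholder instance id; both operators act on an existing instance.
    start_instance = EC2StartInstanceOperator(
        task_id="ec2_start_instance", instance_id="i-0123456789abcdef0"
    )
    stop_instance = EC2StopInstanceOperator(
        task_id="ec2_stop_instance", instance_id="i-0123456789abcdef0"
    )

    start_instance >> stop_instance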
exampleinclude:: /../../tests/system/providers/amazon/aws/example_ec2.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_operator_ec2_create_instance] @@ -80,7 +80,7 @@ Terminate an Amazon EC2 instance To terminate an Amazon EC2 instance you can use :class:`~airflow.providers.amazon.aws.operators.ec2.EC2TerminateInstanceOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ec2.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_operator_ec2_terminate_instance] @@ -94,7 +94,7 @@ Reboot an Amazon EC2 instance To reboot an Amazon EC2 instance you can use :class:`~airflow.providers.amazon.aws.operators.ec2.EC2RebootInstanceOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ec2.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_operator_ec2_reboot_instance] @@ -108,7 +108,7 @@ Hibernate an Amazon EC2 instance To hibernate an Amazon EC2 instance you can use :class:`~airflow.providers.amazon.aws.operators.ec2.EC2HibernateInstanceOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ec2.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_operator_ec2_hibernate_instance] @@ -125,7 +125,7 @@ Wait on an Amazon EC2 instance state To check the state of an Amazon EC2 instance and wait until it reaches the target state you can use :class:`~airflow.providers.amazon.aws.sensors.ec2.EC2InstanceStateSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ec2.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ec2.py :language: python :dedent: 4 :start-after: [START howto_sensor_ec2_instance_state] diff --git a/docs/apache-airflow-providers-amazon/operators/ecs.rst b/docs/apache-airflow-providers-amazon/operators/ecs.rst index 2c4b3c7e7f584..8cb02d539e7de 100644 --- a/docs/apache-airflow-providers-amazon/operators/ecs.rst +++ b/docs/apache-airflow-providers-amazon/operators/ecs.rst @@ -48,7 +48,7 @@ To create an Amazon ECS cluster you can use All optional parameters to be passed to the Create Cluster API should be passed in the 'create_cluster_kwargs' dict. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_operator_ecs_create_cluster] @@ -63,7 +63,7 @@ To delete an Amazon ECS cluster you can use :class:`~airflow.providers.amazon.aws.operators.ecs.EcsDeleteClusterOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_operator_ecs_delete_cluster] @@ -81,7 +81,7 @@ All optional parameters to be passed to the Register Task Definition API should passed in the 'register_task_kwargs' dict. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ecs.py +.. 
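A minimal sketch of ``EcsCreateClusterOperator`` follows to make the cluster-creation step concrete; the cluster name is a placeholder, and any extra Create Cluster API arguments would go in ``create_cluster_kwargs`` as described above.

.. code-block:: python

    from airflow.providers.amazon.aws.operators.ecs import EcsCreateClusterOperator

    # Placeholder cluster name; wait_for_completion blocks until the cluster is ACTIVE.
    create_cluster = EcsCreateClusterOperator(
        task_id="create_cluster",
        cluster_name="example-ecs-cluster",
        wait_for_completion=True,
    )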
exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_operator_ecs_register_task_definition] @@ -96,7 +96,7 @@ To deregister a task definition you can use :class:`~airflow.providers.amazon.aws.operators.ecs.EcsDeregisterTaskDefinitionOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_operator_ecs_deregister_task_definition] @@ -125,14 +125,14 @@ The parameters you need to configure for this Operator will depend upon which `` * If you are using EC2 as the compute resources in your ECS Cluster, set the parameter to EC2. * If you have integrated external resources in your ECS Cluster, for example using ECS Anywhere, and want to run your containers on those external resources, set the parameter to EXTERNAL. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_operator_ecs_run_task] :end-before: [END howto_operator_ecs_run_task] -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ecs_fargate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs_fargate.py :language: python :dedent: 4 :start-after: [START howto_operator_ecs] @@ -145,7 +145,7 @@ To stream logs to AWS CloudWatch, you need to define the parameters below. Using the example above, we would add these additional parameters to enable logging to CloudWatch. You need to ensure that you have the appropriate level of permissions (see next section). -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 8 :start-after: [START howto_awslogs_ecs] @@ -228,7 +228,7 @@ the failure reason if a failed state is provided and that state is reached before the target state. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_sensor_ecs_cluster_state] @@ -248,7 +248,7 @@ to change that. Raises an AirflowException with the failure reason if the faile is reached before the target state. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ecs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs.py :language: python :dedent: 4 :start-after: [START howto_sensor_ecs_task_definition_state] @@ -267,7 +267,7 @@ both can be overridden with provided values. Raises an AirflowException with the failure reason if a failed state is provided and that state is reached before the target state. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ecs_fargate.py +.. 
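Tying the run-task and CloudWatch-logging discussion together, here is a hypothetical ``EcsRunTaskOperator`` sketch; the cluster, task definition, container name, and log group are placeholders, not values from this patch.

.. code-block:: python

    from airflow.providers.amazon.aws.operators.ecs import EcsRunTaskOperator

    # All names below are placeholders; the awslogs_* parameters stream
    # container logs from CloudWatch into the Airflow task log.
    run_task = EcsRunTaskOperator(
        task_id="run_task",
        cluster="example-ecs-cluster",
        task_definition="example-task-definition",
        launch_type="FARGATE",
        overrides={
            "containerOverrides": [
                {"name": "example-container", "command": ["echo", "hello world"]}
            ]
        },
        awslogs_group="/ecs/example",
        awslogs_region="us-east-1",
        awslogs_stream_prefix="ecs/example-container",
    )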
exampleinclude:: /../../providers/tests/system/amazon/aws/example_ecs_fargate.py :language: python :dedent: 4 :start-after: [START howto_sensor_ecs_task_state] diff --git a/docs/apache-airflow-providers-amazon/operators/eks.rst b/docs/apache-airflow-providers-amazon/operators/eks.rst index 9f1cc9df61eec..aa774b9095957 100644 --- a/docs/apache-airflow-providers-amazon/operators/eks.rst +++ b/docs/apache-airflow-providers-amazon/operators/eks.rst @@ -46,7 +46,7 @@ Note: An AWS IAM role with the following permissions is required: ``eks.amazonaws.com`` must be added to the Trusted Relationships ``AmazonEKSClusterPolicy`` IAM Policy must be attached -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_nodegroups.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_create_cluster] @@ -65,7 +65,7 @@ Note: An AWS IAM role with the following permissions is required: ``AmazonEKSClusterPolicy`` IAM Policy must be attached ``AmazonEKSWorkerNodePolicy`` IAM Policy must be attached -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_nodegroup_in_one_step.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_create_cluster_with_nodegroup] @@ -85,7 +85,7 @@ Note: An AWS IAM role with the following permissions is required: ``AmazonEKSClusterPolicy`` IAM Policy must be attached ``AmazonEKSWorkerNodePolicy`` IAM Policy must be attached -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_fargate_in_one_step.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_create_cluster_with_fargate_profile] @@ -100,7 +100,7 @@ To delete an existing Amazon EKS Cluster you can use :class:`~airflow.providers.amazon.aws.operators.eks.EksDeleteClusterOperator`. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True``. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_nodegroups.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_delete_cluster] @@ -110,7 +110,7 @@ Note: If the cluster has any attached resources, such as an Amazon EKS Nodegroup Fargate profile, the cluster can not be deleted. Using the ``force`` parameter will attempt to delete any attached resources first. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_nodegroup_in_one_step.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_force_delete_cluster] @@ -130,7 +130,7 @@ Note: An AWS IAM role with the following permissions is required: ``AmazonEC2ContainerRegistryReadOnly`` IAM Policy must be attached ``AmazonEKSWorkerNodePolicy`` IAM Policy must be attached -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_nodegroups.py +.. 
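The sketch below shows a bare-bones ``EksCreateClusterOperator`` call for the control-plane-only case mentioned above; the role ARN and subnet ids are placeholders.

.. code-block:: python

    from airflow.providers.amazon.aws.operators.eks import EksCreateClusterOperator

    # Placeholder ARN and subnets; compute=None creates only the control plane,
    # leaving nodegroups or Fargate profiles to separate tasks.
    create_cluster = EksCreateClusterOperator(
        task_id="create_eks_cluster",
        cluster_name="example-cluster",
        cluster_role_arn="arn:aws:iam::123456789012:role/EKSExampleRole",
        resources_vpc_config={"subnetIds": ["subnet-12345", "subnet-67890"]},
        compute=None,
    )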
exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_create_nodegroup] @@ -145,7 +145,7 @@ To delete an existing Amazon EKS managed node group you can use :class:`~airflow.providers.amazon.aws.operators.eks.EksDeleteNodegroupOperator`. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True``. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_nodegroups.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_delete_nodegroup] @@ -164,7 +164,7 @@ Note: An AWS IAM role with the following permissions is required: ``AmazonEC2ContainerRegistryReadOnly`` IAM Policy must be attached ``AmazonEKSWorkerNodePolicy`` IAM Policy must be attached -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_fargate_profile.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_create_fargate_profile] @@ -178,7 +178,7 @@ Delete an AWS Fargate Profile To delete an existing AWS Fargate Profile you can use :class:`~airflow.providers.amazon.aws.operators.eks.EksDeleteFargateProfileOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_fargate_profile.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_delete_fargate_profile] @@ -194,7 +194,7 @@ To run a pod on an existing Amazon EKS Cluster, you can use Note: An Amazon EKS Cluster with underlying compute infrastructure is required. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_nodegroups.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_operator_eks_pod_operator] @@ -211,7 +211,7 @@ Wait on an Amazon EKS cluster state To check the state of an Amazon EKS Cluster until it reaches the target state or another terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.eks.EksClusterStateSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_nodegroups.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_sensor_eks_cluster] @@ -225,7 +225,7 @@ Wait on an Amazon EKS managed node group state To check the state of an Amazon EKS managed node group until it reaches the target state or another terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.eks.EksNodegroupStateSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_nodegroups.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_nodegroups.py :language: python :dedent: 4 :start-after: [START howto_sensor_eks_nodegroup] @@ -239,7 +239,7 @@ Wait on an AWS Fargate profile state To check the state of an AWS Fargate profile until it reaches the target state or another terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.eks.EksFargateProfileSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eks_with_fargate_profile.py +.. 
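To complement the nodegroup and pod discussion, a minimal, hypothetical ``EksPodOperator`` sketch follows; the cluster name, pod name, and command are placeholders.

.. code-block:: python

    from airflow.providers.amazon.aws.operators.eks import EksPodOperator

    # Placeholder names; runs a one-off container on an existing EKS cluster
    # and pulls its logs back into Airflow.
    run_pod = EksPodOperator(
        task_id="run_pod",
        cluster_name="example-cluster",
        pod_name="example-pod",
        image="amazon/aws-cli:latest",
        cmds=["sh", "-c", "echo Test"],
        get_logs=True,
    )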
exampleinclude:: /../../providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py :language: python :dedent: 4 :start-after: [START howto_sensor_eks_fargate] diff --git a/docs/apache-airflow-providers-amazon/operators/emr/emr.rst b/docs/apache-airflow-providers-amazon/operators/emr/emr.rst index 8a2255ddbf4cc..5e32baa151c4f 100644 --- a/docs/apache-airflow-providers-amazon/operators/emr/emr.rst +++ b/docs/apache-airflow-providers-amazon/operators/emr/emr.rst @@ -57,7 +57,7 @@ JobFlow configuration To create a job flow on EMR, you need to specify the configuration for the EMR cluster: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py :language: python :start-after: [START howto_operator_emr_steps_config] :end-before: [END howto_operator_emr_steps_config] @@ -80,7 +80,7 @@ Create the Job Flow In the following code we are creating a new job flow using the configuration as explained above. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_create_job_flow] @@ -98,7 +98,7 @@ Using ``deferrable`` mode will release worker slots and leads to efficient utili resources within the Airflow cluster. However, this mode will need the Airflow triggerer to be available in your deployment. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_add_steps] @@ -116,7 +116,7 @@ Using ``deferrable`` mode will release worker slots and leads to efficient utili resources within the Airflow cluster. However, this mode will need the Airflow triggerer to be available in your deployment. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_terminate_job_flow] @@ -130,7 +130,7 @@ Modify Amazon EMR cluster To modify an existing EMR cluster you can use :class:`~airflow.providers.amazon.aws.operators.emr.EmrModifyClusterOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_modify_cluster] @@ -144,7 +144,7 @@ Start an EMR notebook execution You can use :class:`~airflow.providers.amazon.aws.operators.emr.EmrStartNotebookExecutionOperator` to start a notebook execution on an existing notebook attached to a running cluster. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_notebook_execution.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_notebook_execution.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_start_notebook_execution] @@ -158,7 +158,7 @@ Stop an EMR notebook execution You can use :class:`~airflow.providers.amazon.aws.operators.emr.EmrStopNotebookExecutionOperator` to stop a running notebook execution. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_notebook_execution.py +.. 
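Here is a minimal sketch of ``EmrAddStepsOperator`` in deferrable mode, matching the add-steps discussion above; the step definition is a placeholder, and ``create_job_flow`` is assumed to be an upstream ``EmrCreateJobFlowOperator`` task.

.. code-block:: python

    from airflow.providers.amazon.aws.operators.emr import EmrAddStepsOperator

    # Placeholder step: command-runner simply echoes a string on the cluster.
    SPARK_STEPS = [
        {
            "Name": "example_step",
            "ActionOnFailure": "CONTINUE",
            "HadoopJarStep": {"Jar": "command-runner.jar", "Args": ["echo", "hello"]},
        }
    ]

    add_steps = EmrAddStepsOperator(
        task_id="add_steps",
        job_flow_id=create_job_flow.output,  # XCom from the upstream create task (assumed)
        steps=SPARK_STEPS,
        deferrable=True,  # frees the worker slot while the steps run
    )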
exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_notebook_execution.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_stop_notebook_execution] @@ -175,7 +175,7 @@ Wait on an EMR notebook execution state To monitor the state of an EMR notebook execution you can use :class:`~airflow.providers.amazon.aws.sensors.emr.EmrNotebookExecutionSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_notebook_execution.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_notebook_execution.py :language: python :dedent: 4 :start-after: [START howto_sensor_emr_notebook_execution] @@ -189,7 +189,7 @@ Wait on an Amazon EMR job flow state To monitor the state of an EMR job flow you can use :class:`~airflow.providers.amazon.aws.sensors.emr.EmrJobFlowSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py :language: python :dedent: 4 :start-after: [START howto_sensor_emr_job_flow] @@ -203,7 +203,7 @@ Wait on an Amazon EMR step state To monitor the state of an EMR job step you can use :class:`~airflow.providers.amazon.aws.sensors.emr.EmrStepSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr.py :language: python :dedent: 4 :start-after: [START howto_sensor_emr_step] diff --git a/docs/apache-airflow-providers-amazon/operators/emr/emr_eks.rst b/docs/apache-airflow-providers-amazon/operators/emr/emr_eks.rst index dc31c8f984618..122b22fa04841 100644 --- a/docs/apache-airflow-providers-amazon/operators/emr/emr_eks.rst +++ b/docs/apache-airflow-providers-amazon/operators/emr/emr_eks.rst @@ -48,7 +48,7 @@ the EKS cluster that you would like to use, and an EKS namespace. Refer to the `EMR on EKS Development guide `__ for more details. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_eks.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_eks.py :language: python :start-after: [START howto_operator_emr_eks_create_cluster] :end-before: [END howto_operator_emr_eks_create_cluster] @@ -81,7 +81,7 @@ and ``monitoringConfiguration`` to send logs to the ``/aws/emr-eks-spark`` log g Refer to the `EMR on EKS guide `__ for more details on job configuration. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_eks.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_eks.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_eks_config] @@ -92,7 +92,7 @@ can store them in a connection or provide them in the DAG. Your AWS region shoul in the ``aws_default`` connection as ``{"region_name": "us-east-1"}`` or a custom connection name that gets passed to the operator with the ``aws_conn_id`` parameter. The operator returns the Job ID of the job run. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_eks.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_eks.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_container] @@ -110,7 +110,7 @@ Wait on an Amazon EMR virtual cluster job To wait on the status of an Amazon EMR virtual cluster job to reach a terminal state, you can use :class:`~airflow.providers.amazon.aws.sensors.emr.EmrContainerSensor` -.. 
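The following is a hedged sketch of ``EmrContainerOperator`` for the EMR on EKS job submission described above; the virtual cluster id, role ARN, and script path are placeholders.

.. code-block:: python

    from airflow.providers.amazon.aws.operators.emr import EmrContainerOperator

    # Placeholder Spark job driver; the entry point is a PySpark script in S3.
    JOB_DRIVER_ARG = {
        "sparkSubmitJobDriver": {
            "entryPoint": "s3://example-bucket/scripts/pi.py",
            "sparkSubmitParameters": "--conf spark.executor.instances=2",
        }
    }

    start_job = EmrContainerOperator(
        task_id="start_job",
        virtual_cluster_id="example-virtual-cluster-id",
        execution_role_arn="arn:aws:iam::123456789012:role/EMRContainersExampleRole",
        release_label="emr-6.3.0-latest",
        job_driver=JOB_DRIVER_ARG,
        name="pi.py",
    )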
exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_eks.py :language: python :dedent: 4 :start-after: [START howto_sensor_emr_container] diff --git a/docs/apache-airflow-providers-amazon/operators/emr/emr_serverless.rst b/docs/apache-airflow-providers-amazon/operators/emr/emr_serverless.rst index 9915763e19e09..28c7bb4720a09 100644 --- a/docs/apache-airflow-providers-amazon/operators/emr/emr_serverless.rst +++ b/docs/apache-airflow-providers-amazon/operators/emr/emr_serverless.rst @@ -43,7 +43,7 @@ create a new EMR Serverless Application. This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. This requires the aiobotocore module to be installed. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_serverless.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_serverless.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_serverless_create_application] @@ -59,7 +59,7 @@ start an EMR Serverless Job. This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. This requires the aiobotocore module to be installed. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_serverless.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_serverless.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_serverless_start_job] @@ -93,7 +93,7 @@ stop an EMR Serverless Application. This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. This requires the aiobotocore module to be installed. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_serverless.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_serverless.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_serverless_stop_application] @@ -109,7 +109,7 @@ delete an EMR Serverless Application. This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. This requires the aiobotocore module to be installed. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_serverless.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_serverless.py :language: python :dedent: 4 :start-after: [START howto_operator_emr_serverless_delete_application] @@ -126,7 +126,7 @@ Wait on an EMR Serverless Job state To monitor the state of an EMR Serverless Job you can use :class:`~airflow.providers.amazon.aws.sensors.emr.EmrServerlessJobSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_serverless.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_serverless.py :language: python :dedent: 4 :start-after: [START howto_sensor_emr_serverless_job] @@ -140,7 +140,7 @@ Wait on an EMR Serverless Application state To monitor the state of an EMR Serverless Application you can use :class:`~airflow.providers.amazon.aws.sensors.emr.EmrServerlessApplicationSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_emr_serverless.py +.. 
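A minimal sketch of ``EmrServerlessStartJobOperator`` is shown below for the start-job step discussed above; the application id, role ARN, and S3 URIs are placeholders, not values from this patch.

.. code-block:: python

    from airflow.providers.amazon.aws.operators.emr import EmrServerlessStartJobOperator

    # Placeholder identifiers; logs are written to the given S3 prefix.
    start_job = EmrServerlessStartJobOperator(
        task_id="start_emr_serverless_job",
        application_id="example-application-id",
        execution_role_arn="arn:aws:iam::123456789012:role/EMRServerlessExampleRole",
        job_driver={"sparkSubmit": {"entryPoint": "s3://example-bucket/scripts/pi.py"}},
        configuration_overrides={
            "monitoringConfiguration": {
                "s3MonitoringConfiguration": {"logUri": "s3://example-bucket/logs/"}
            }
        },
    )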
exampleinclude:: /../../providers/tests/system/amazon/aws/example_emr_serverless.py :language: python :dedent: 4 :start-after: [START howto_sensor_emr_serverless_application] diff --git a/docs/apache-airflow-providers-amazon/operators/eventbridge.rst b/docs/apache-airflow-providers-amazon/operators/eventbridge.rst index 453e5af31070a..a39c884149255 100644 --- a/docs/apache-airflow-providers-amazon/operators/eventbridge.rst +++ b/docs/apache-airflow-providers-amazon/operators/eventbridge.rst @@ -48,7 +48,7 @@ Send events to Amazon EventBridge To send custom events to Amazon EventBridge, use :class:`~airflow.providers.amazon.aws.operators.eventbridge.EventBridgePutEventsOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eventbridge.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eventbridge.py :language: python :dedent: 4 :start-after: [START howto_operator_eventbridge_put_events] @@ -63,7 +63,7 @@ Create or update a rule on Amazon EventBridge To create or update a rule on EventBridge, use :class:`~airflow.providers.amazon.aws.operators.eventbridge.EventBridgePutRuleOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eventbridge.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eventbridge.py :language: python :dedent: 4 :start-after: [START howto_operator_eventbridge_put_rule] @@ -78,7 +78,7 @@ Enable a rule on Amazon EventBridge To enable an existing rule on EventBridge, use :class:`~airflow.providers.amazon.aws.operators.eventbridge.EventBridgeEnableRuleOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eventbridge.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eventbridge.py :language: python :dedent: 4 :start-after: [START howto_operator_eventbridge_enable_rule] @@ -93,7 +93,7 @@ Disable a rule on Amazon EventBridge To disable an existing rule on EventBridge, use :class:`~airflow.providers.amazon.aws.operators.eventbridge.EventBridgeDisableRuleOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_eventbridge.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_eventbridge.py :language: python :dedent: 4 :start-after: [START howto_operator_eventbridge_disable_rule] diff --git a/docs/apache-airflow-providers-amazon/operators/glue.rst b/docs/apache-airflow-providers-amazon/operators/glue.rst index 18a9b887d8b40..88b4d3374dbf3 100644 --- a/docs/apache-airflow-providers-amazon/operators/glue.rst +++ b/docs/apache-airflow-providers-amazon/operators/glue.rst @@ -46,7 +46,7 @@ AWS Glue Crawlers allow you to easily extract data from various data sources. To create a new AWS Glue Crawler or run an existing one you can use :class:`~airflow.providers.amazon.aws.operators.glue_crawler.GlueCrawlerOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue.py :language: python :dedent: 4 :start-after: [START howto_operator_glue_crawler] @@ -64,7 +64,7 @@ Submit an AWS Glue job To submit a new AWS Glue job you can use :class:`~airflow.providers.amazon.aws.operators.glue.GlueJobOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue.py :language: python :dedent: 4 :start-after: [START howto_operator_glue] @@ -84,7 +84,7 @@ of your data so that you can make good business decisions. 
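To illustrate the EventBridge put-events call above, here is a minimal, hypothetical sketch; the bus name, source, and detail payload are placeholders.

.. code-block:: python

    import json

    from airflow.providers.amazon.aws.operators.eventbridge import EventBridgePutEventsOperator

    # Placeholder event entry; Detail must be a JSON-encoded string.
    put_events = EventBridgePutEventsOperator(
        task_id="put_events",
        entries=[
            {
                "Detail": json.dumps({"event-name": "custom-event"}),
                "EventBusName": "custom-bus",
                "Source": "airflow.example",
                "DetailType": "ExampleEvent",
            }
        ],
    )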
To create a new AWS Glue Data Quality ruleset or update an existing one you can use :class:`~airflow.providers.amazon.aws.operators.glue.GlueDataQualityOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue_data_quality.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue_data_quality.py :language: python :dedent: 4 :start-after: [START howto_operator_glue_data_quality_operator] @@ -98,7 +98,7 @@ Start an AWS Glue Data Quality Evaluation Run To start an AWS Glue Data Quality ruleset evaluation run you can use :class:`~airflow.providers.amazon.aws.operators.glue.GlueDataQualityRuleSetEvaluationRunOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue_data_quality.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue_data_quality.py :language: python :dedent: 4 :start-after: [START howto_operator_glue_data_quality_ruleset_evaluation_run_operator] @@ -112,7 +112,7 @@ Start an AWS Glue Data Quality Recommendation Run To start an AWS Glue Data Quality rule recommendation run you can use :class:`~airflow.providers.amazon.aws.operators.glue.GlueDataQualityRuleRecommendationRunOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue_data_quality_with_recommendation.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py :language: python :dedent: 4 :start-after: [START howto_operator_glue_data_quality_rule_recommendation_run] @@ -129,7 +129,7 @@ Wait on an AWS Glue crawler state To wait on the state of an AWS Glue crawler execution until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.glue_crawler.GlueCrawlerSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue.py :language: python :dedent: 4 :start-after: [START howto_sensor_glue_crawler] @@ -143,7 +143,7 @@ Wait on an AWS Glue job state To wait on the state of an AWS Glue Job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.glue.GlueJobSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue.py :language: python :dedent: 4 :start-after: [START howto_sensor_glue] @@ -157,7 +157,7 @@ Wait on an AWS Glue Data Quality Evaluation Run To wait on the state of an AWS Glue Data Quality RuleSet Evaluation Run until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.glue.GlueDataQualityRuleSetEvaluationRunSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue_data_quality.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue_data_quality.py :language: python :dedent: 4 :start-after: [START howto_sensor_glue_data_quality_ruleset_evaluation_run] @@ -171,7 +171,7 @@ Wait on an AWS Glue Data Quality Recommendation Run To wait on the state of an AWS Glue Data Quality recommendation run until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.glue.GlueDataQualityRuleRecommendationRunSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue_data_quality_with_recommendation.py +.. 
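Complementing the Glue job submission described earlier, here is a minimal, hypothetical ``GlueJobOperator`` sketch; the job name, bucket, script path, and role name are placeholders.

.. code-block:: python

    from airflow.providers.amazon.aws.operators.glue import GlueJobOperator

    # Placeholder names; create_job_kwargs is forwarded to the Glue CreateJob API.
    submit_glue_job = GlueJobOperator(
        task_id="submit_glue_job",
        job_name="example-glue-job",
        script_location="s3://example-bucket/etl_script.py",
        s3_bucket="example-bucket",
        iam_role_name="GlueExampleRole",
        create_job_kwargs={"GlueVersion": "4.0", "NumberOfWorkers": 2, "WorkerType": "G.1X"},
    )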
exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py :language: python :dedent: 4 :start-after: [START howto_sensor_glue_data_quality_rule_recommendation_run] @@ -185,7 +185,7 @@ Wait on an AWS Glue Catalog Partition To wait for a partition to show up in the AWS Glue Catalog you can use :class:`~airflow.providers.amazon.aws.sensors.glue_catalog_partition.GlueCatalogPartitionSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue.py :language: python :dedent: 4 :start-after: [START howto_sensor_glue_catalog_partition] diff --git a/docs/apache-airflow-providers-amazon/operators/glue_databrew.rst b/docs/apache-airflow-providers-amazon/operators/glue_databrew.rst index be654335ea1d0..2286a5146a59d 100644 --- a/docs/apache-airflow-providers-amazon/operators/glue_databrew.rst +++ b/docs/apache-airflow-providers-amazon/operators/glue_databrew.rst @@ -46,7 +46,7 @@ Start an AWS Glue DataBrew job To submit a new AWS Glue DataBrew job you can use :class:`~airflow.providers.amazon.aws.operators.glue_databrew.GlueDataBrewStartJobOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glue_databrew.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glue_databrew.py :language: python :dedent: 4 :start-after: [START howto_operator_glue_databrew_start] diff --git a/docs/apache-airflow-providers-amazon/operators/kinesis_analytics.rst b/docs/apache-airflow-providers-amazon/operators/kinesis_analytics.rst index d7156feabb771..dc351fc1c3401 100644 --- a/docs/apache-airflow-providers-amazon/operators/kinesis_analytics.rst +++ b/docs/apache-airflow-providers-amazon/operators/kinesis_analytics.rst @@ -44,7 +44,7 @@ Create an Amazon Managed Service for Apache Flink Application To create an Amazon Managed Service for Apache Flink application, you can use :class:`~airflow.providers.amazon.aws.operators.kinesis_analytics.KinesisAnalyticsV2CreateApplicationOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_kinesis_analytics.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_kinesis_analytics.py :language: python :dedent: 4 :start-after: [START howto_operator_create_application] @@ -58,7 +58,7 @@ Start an Amazon Managed Service for Apache Flink Application To start an Amazon Managed Service for Apache Flink application, you can use :class:`~airflow.providers.amazon.aws.operators.kinesis_analytics.KinesisAnalyticsV2StartApplicationOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_kinesis_analytics.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_kinesis_analytics.py :language: python :dedent: 4 :start-after: [START howto_operator_start_application] @@ -72,7 +72,7 @@ Stop an Amazon Managed Service for Apache Flink Application To stop an Amazon Managed Service for Apache Flink application, you can use :class:`~airflow.providers.amazon.aws.operators.kinesis_analytics.KinesisAnalyticsV2StopApplicationOperator`. -.. 
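For the DataBrew job just mentioned, a minimal, hypothetical sketch follows; the job name is a placeholder and the job itself is assumed to already exist in DataBrew.

.. code-block:: python

    from airflow.providers.amazon.aws.operators.glue_databrew import GlueDataBrewStartJobOperator

    # Placeholder job name; starts an existing DataBrew job and waits for it.
    start_databrew_job = GlueDataBrewStartJobOperator(
        task_id="start_databrew_job",
        job_name="example-databrew-job",
    )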
exampleinclude:: /../../providers/tests/system/amazon/aws/example_kinesis_analytics.py :language: python :dedent: 4 :start-after: [START howto_operator_stop_application] @@ -89,7 +89,7 @@ Wait for an Amazon Managed Service for Apache Flink Application to start To wait on the state of an Amazon Managed Service for Apache Flink Application to start you can use :class:`~airflow.providers.amazon.aws.sensors.kinesis_analytics.KinesisAnalyticsV2StartApplicationCompletedSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_kinesis_analytics.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_kinesis_analytics.py :language: python :dedent: 4 :start-after: [START howto_sensor_start_application] @@ -103,7 +103,7 @@ Wait for an Amazon Managed Service for Apache Flink Application to stop To wait on the state of an Amazon Managed Service for Apache Flink Application to stop you can use :class:`~airflow.providers.amazon.aws.sensors.kinesis_analytics.KinesisAnalyticsV2StopApplicationCompletedSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_kinesis_analytics.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_kinesis_analytics.py :language: python :dedent: 4 :start-after: [START howto_sensor_stop_application] diff --git a/docs/apache-airflow-providers-amazon/operators/lambda.rst b/docs/apache-airflow-providers-amazon/operators/lambda.rst index ef2576a1f5413..3a9f1a0132451 100644 --- a/docs/apache-airflow-providers-amazon/operators/lambda.rst +++ b/docs/apache-airflow-providers-amazon/operators/lambda.rst @@ -48,7 +48,7 @@ To create an AWS lambda function you can use This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. This requires the aiobotocore module to be installed. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_lambda.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_lambda.py :language: python :dedent: 4 :start-after: [START howto_operator_create_lambda_function] @@ -111,7 +111,7 @@ To invoke an AWS lambda function you can use The only way is `configuring destinations for asynchronous invocation `_ and sensing destination. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_lambda.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_lambda.py :language: python :dedent: 4 :start-after: [START howto_operator_invoke_lambda_function] @@ -128,7 +128,7 @@ Wait on an AWS Lambda function deployment state To check the deployment state of an AWS Lambda function until it reaches the target state or another terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.lambda_function.LambdaFunctionStateSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_lambda.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_lambda.py :language: python :dedent: 4 :start-after: [START howto_sensor_lambda_function_state] diff --git a/docs/apache-airflow-providers-amazon/operators/neptune.rst b/docs/apache-airflow-providers-amazon/operators/neptune.rst index 98c0d7dd57c4a..7b9204e1c0279 100644 --- a/docs/apache-airflow-providers-amazon/operators/neptune.rst +++ b/docs/apache-airflow-providers-amazon/operators/neptune.rst @@ -49,7 +49,7 @@ the aiobotocore module to be installed. .. note:: This operator only starts an existing Neptune database cluster, it does not create a cluster. -.. 
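A minimal sketch of the Lambda invocation pattern described above is shown here; the function name and payload are placeholders, not values from this patch.

.. code-block:: python

    import json

    from airflow.providers.amazon.aws.operators.lambda_function import (
        LambdaInvokeFunctionOperator,
    )

    # Placeholder function name; payload is a JSON-encoded event string.
    invoke_lambda_function = LambdaInvokeFunctionOperator(
        task_id="invoke_lambda_function",
        function_name="example-function",
        payload=json.dumps({"SampleEvent": {"SampleData": {"Name": "XYZ"}}}),
    )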
exampleinclude:: /../../tests/system/providers/amazon/aws/example_neptune.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_neptune.py :language: python :dedent: 4 :start-after: [START howto_operator_start_neptune_cluster] @@ -65,7 +65,7 @@ To stop a running Neptune database cluster, you can use This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. This requires the aiobotocore module to be installed. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_neptune.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_neptune.py :language: python :dedent: 4 :start-after: [START howto_operator_stop_neptune_cluster] diff --git a/docs/apache-airflow-providers-amazon/operators/opensearchserverless.rst b/docs/apache-airflow-providers-amazon/operators/opensearchserverless.rst index 4fb0b4db93631..4c6f368655812 100644 --- a/docs/apache-airflow-providers-amazon/operators/opensearchserverless.rst +++ b/docs/apache-airflow-providers-amazon/operators/opensearchserverless.rst @@ -46,7 +46,7 @@ Wait for an Amazon OpenSearch Serverless Collection to become active To wait on the state of an Amazon OpenSearch Serverless Collection until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.opensearch_serverless.OpenSearchServerlessCollectionActiveSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py :language: python :dedent: 4 :start-after: [START howto_sensor_opensearch_collection_active] diff --git a/docs/apache-airflow-providers-amazon/operators/quicksight.rst b/docs/apache-airflow-providers-amazon/operators/quicksight.rst index 9cc0abe337e41..4ae412da6e3ae 100644 --- a/docs/apache-airflow-providers-amazon/operators/quicksight.rst +++ b/docs/apache-airflow-providers-amazon/operators/quicksight.rst @@ -46,7 +46,7 @@ Amazon QuickSight create ingestion The ``QuickSightCreateIngestionOperator`` creates and starts a new SPICE ingestion for a dataset. The operator also refreshes existing SPICE datasets. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_quicksight.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_quicksight.py :language: python :dedent: 4 :start-after: [START howto_operator_quicksight_create_ingestion] @@ -62,7 +62,7 @@ Amazon QuickSight ingestion sensor The ``QuickSightSensor`` waits for an Amazon QuickSight create ingestion until it reaches a terminal state. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_quicksight.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_quicksight.py :language: python :dedent: 4 :start-after: [START howto_sensor_quicksight] diff --git a/docs/apache-airflow-providers-amazon/operators/rds.rst b/docs/apache-airflow-providers-amazon/operators/rds.rst index e27bbc2d2f699..9b06c1a048305 100644 --- a/docs/apache-airflow-providers-amazon/operators/rds.rst +++ b/docs/apache-airflow-providers-amazon/operators/rds.rst @@ -41,7 +41,7 @@ To create a snapshot of an Amazon RDS database instance or cluster you can use :class:`~airflow.providers.amazon.aws.operators.rds.RDSCreateDBSnapshotOperator`. The source database instance must be in the ``available`` or ``storage-optimization`` state. -.. 
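The Neptune start/stop pair discussed above can be sketched as follows; the cluster id is a placeholder and both operators act on an existing cluster.

.. code-block:: python

    from airflow.providers.amazon.aws.operators.neptune import (
        NeptuneStartDbClusterOperator,
        NeptuneStopDbClusterOperator,
    )

    # Placeholder cluster id; these operators never create or delete clusters.
    start_cluster = NeptuneStartDbClusterOperator(
        task_id="start_neptune_cluster", db_cluster_id="example-neptune-cluster"
    )
    stop_cluster = NeptuneStopDbClusterOperator(
        task_id="stop_neptune_cluster", db_cluster_id="example-neptune-cluster"
    )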
exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_snapshot.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_create_db_snapshot] @@ -56,7 +56,7 @@ To copy a snapshot of an Amazon RDS database instance or cluster you can use :class:`~airflow.providers.amazon.aws.operators.rds.RDSCopyDBSnapshotOperator`. The source database snapshot must be in the ``available`` state. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_snapshot.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_snapshot.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_copy_snapshot] @@ -71,7 +71,7 @@ To delete a snapshot of an Amazon RDS database instance or cluster you can use :class:`~airflow.providers.amazon.aws.operators.rds.RDSDeleteDBSnapshotOperator`. The database snapshot must be in the ``available`` state to be deleted. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_snapshot.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_snapshot.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_delete_snapshot] @@ -86,7 +86,7 @@ To export an Amazon RDS snapshot to Amazon S3 you can use :class:`~airflow.providers.amazon.aws.operators.rds.RDSStartExportTaskOperator`. The provided IAM role must have access to the S3 bucket. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_export.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_export.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_start_export_task] @@ -101,7 +101,7 @@ To cancel an Amazon RDS export task to S3 you can use :class:`~airflow.providers.amazon.aws.operators.rds.RDSCancelExportTaskOperator`. Any data that has already been written to the S3 bucket isn't removed. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_export.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_export.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_cancel_export] @@ -118,7 +118,7 @@ This action requires an Amazon SNS topic Amazon Resource Name (ARN). Amazon RDS event notification is only available for unencrypted SNS topics. If you specify an encrypted SNS topic, event notifications are not sent for the topic. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_event.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_event.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_create_event_subscription] @@ -132,7 +132,7 @@ Unsubscribe from an Amazon RDS event notification To delete an Amazon RDS event subscription you can use :class:`~airflow.providers.amazon.aws.operators.rds.RDSDeleteEventSubscriptionOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_event.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_event.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_delete_event_subscription] @@ -147,7 +147,7 @@ To create an AWS DB instance you can use :class:`~airflow.providers.amazon.aws.operators.rds.RdsCreateDbInstanceOperator`. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True``. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_instance.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_instance.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_create_db_instance] @@ -162,7 +162,7 @@ To delete an AWS DB instance you can use :class:`~airflow.providers.amazon.aws.operators.rds.RDSDeleteDbInstanceOperator`. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True``. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_instance.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_instance.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_delete_db_instance] @@ -176,7 +176,7 @@ Start a database instance or cluster To start an Amazon RDS DB instance or cluster you can use :class:`~airflow.providers.amazon.aws.operators.rds.RdsStartDbOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_instance.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_instance.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_start_db] @@ -191,7 +191,7 @@ Stop a database instance or cluster To stop an Amazon RDS DB instance or cluster you can use :class:`~airflow.providers.amazon.aws.operators.rds.RdsStopDbOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_instance.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_instance.py :language: python :dedent: 4 :start-after: [START howto_operator_rds_stop_db] @@ -209,7 +209,7 @@ To wait for an Amazon RDS instance or cluster to reach a specific status you can :class:`~airflow.providers.amazon.aws.sensors.rds.RdsDbSensor`. By default, the sensor waits for a database instance to reach the ``available`` state. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_instance.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_instance.py :language: python :dedent: 4 :start-after: [START howto_sensor_rds_instance] @@ -225,7 +225,7 @@ To wait for an Amazon RDS snapshot with specific statuses you can use :class:`~airflow.providers.amazon.aws.sensors.rds.RdsSnapshotExistenceSensor`. By default, the sensor waits for the existence of a snapshot with status ``available``. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_rds_snapshot.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_snapshot.py :language: python :dedent: 4 :start-after: [START howto_sensor_rds_snapshot_existence] @@ -241,7 +241,7 @@ To wait for an Amazon RDS snapshot export task with specific statuses you can :class:`~airflow.providers.amazon.aws.sensors.rds.RdsExportTaskExistenceSensor`. By default, the sensor waits for the existence of a snapshot with status ``available``. -.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_rds_export.py :language: python :dedent: 4 :start-after: [START howto_sensor_rds_export_task_existence] diff --git a/docs/apache-airflow-providers-amazon/operators/redshift/redshift_cluster.rst b/docs/apache-airflow-providers-amazon/operators/redshift/redshift_cluster.rst index a50e56a760bbd..bc710b56eecc7 100644 --- a/docs/apache-airflow-providers-amazon/operators/redshift/redshift_cluster.rst +++ b/docs/apache-airflow-providers-amazon/operators/redshift/redshift_cluster.rst @@ -40,7 +40,7 @@ Create an Amazon Redshift cluster To create an Amazon Redshift Cluster with the specified parameters you can use :class:`~airflow.providers.amazon.aws.operators.redshift_cluster.RedshiftCreateClusterOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_cluster] @@ -56,7 +56,7 @@ To resume a 'paused' Amazon Redshift cluster you can use You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True``. This will ensure that the task is deferred from the Airflow worker slot and polling for the task status happens on the trigger. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_resume_cluster] @@ -71,7 +71,7 @@ To pause an ``available`` Amazon Redshift cluster you can use :class:`RedshiftPauseClusterOperator `. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True`` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_pause_cluster] @@ -85,7 +85,7 @@ Create an Amazon Redshift cluster snapshot To create Amazon Redshift cluster snapshot you can use :class:`RedshiftCreateClusterSnapshotOperator ` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_create_cluster_snapshot] @@ -99,7 +99,7 @@ Delete an Amazon Redshift cluster snapshot To delete Amazon Redshift cluster snapshot you can use :class:`RedshiftDeleteClusterSnapshotOperator ` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_delete_cluster_snapshot] @@ -114,7 +114,7 @@ To delete an Amazon Redshift cluster you can use :class:`RedshiftDeleteClusterOperator `. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True`` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_delete_cluster] @@ -131,7 +131,7 @@ Wait on an Amazon Redshift cluster state To check the state of an Amazon Redshift Cluster until it reaches the target state or another terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.redshift_cluster.RedshiftClusterSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_sensor_redshift_cluster] diff --git a/docs/apache-airflow-providers-amazon/operators/redshift/redshift_data.rst b/docs/apache-airflow-providers-amazon/operators/redshift/redshift_data.rst index 2638e1732cd6c..762eced74fa68 100644 --- a/docs/apache-airflow-providers-amazon/operators/redshift/redshift_data.rst +++ b/docs/apache-airflow-providers-amazon/operators/redshift/redshift_data.rst @@ -48,7 +48,7 @@ statements against an Amazon Redshift cluster. This differs from ``RedshiftSQLOperator`` in that it allows users to query and retrieve data via the AWS API and avoid the necessity of a Postgres connection. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_data] @@ -60,7 +60,7 @@ Reuse a session when executing multiple statements Specify the ``session_keep_alive_seconds`` parameter on an upstream task. In a downstream task, get the session ID from the XCom and pass it to the ``session_id`` parameter. This is useful when you work with temporary tables. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift.py :language: python :dedent: 4 :start-after: [START howto_operator_redshift_data_session_reuse] diff --git a/docs/apache-airflow-providers-amazon/operators/redshift/redshift_sql.rst b/docs/apache-airflow-providers-amazon/operators/redshift/redshift_sql.rst index 86444d664f3a0..5f4f28b38c56c 100644 --- a/docs/apache-airflow-providers-amazon/operators/redshift/redshift_sql.rst +++ b/docs/apache-airflow-providers-amazon/operators/redshift/redshift_sql.rst @@ -40,7 +40,7 @@ The generic ``SQLExecuteQueryOperator`` can be used to execute SQL queries again To execute a SQL query against an Amazon Redshift cluster without using a Redshift connection, please check ``RedshiftDataOperator``. -.. exampleinclude:: /../../tests/system/providers/common/sql/example_sql_execute_query.py +.. exampleinclude:: /../../providers/tests/system/common/sql/example_sql_execute_query.py :language: python :dedent: 4 :start-after: [START howto_operator_sql_execute_query] diff --git a/docs/apache-airflow-providers-amazon/operators/s3/glacier.rst b/docs/apache-airflow-providers-amazon/operators/s3/glacier.rst index c85e7ac294c7c..ab5b6acbe704c 100644 --- a/docs/apache-airflow-providers-amazon/operators/s3/glacier.rst +++ b/docs/apache-airflow-providers-amazon/operators/s3/glacier.rst @@ -45,7 +45,7 @@ use :class:`~airflow.providers.amazon.aws.transfers.glacier_to_gcs.GlacierCreate This Operator returns a dictionary of information related to the initiated job such as *jobId*, which is required for subsequent tasks. -.. 
exampleinclude:: /../../tests/system/providers/amazon/aws/example_glacier_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glacier_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_glacier_create_job] @@ -59,7 +59,7 @@ Upload archive to an Amazon Glacier To add an archive to an Amazon S3 Glacier vault use :class:`~airflow.providers.amazon.aws.transfers.glacier_to_gcs.GlacierUploadArchiveOperator` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glacier_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glacier_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_glacier_upload_archive] @@ -76,7 +76,7 @@ Wait on an Amazon Glacier job state To wait on the status of an Amazon Glacier Job to reach a terminal state use :class:`~airflow.providers.amazon.aws.sensors.glacier.GlacierJobOperationSensor` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glacier_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_glacier_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_sensor_glacier_job_operation] diff --git a/docs/apache-airflow-providers-amazon/operators/s3/s3.rst b/docs/apache-airflow-providers-amazon/operators/s3/s3.rst index 41e5c7149bb18..ff48c1d5d15b8 100644 --- a/docs/apache-airflow-providers-amazon/operators/s3/s3.rst +++ b/docs/apache-airflow-providers-amazon/operators/s3/s3.rst @@ -38,7 +38,7 @@ Create an Amazon S3 bucket To create an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3CreateBucketOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_create_bucket] @@ -52,7 +52,7 @@ Delete an Amazon S3 bucket To delete an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3DeleteBucketOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_delete_bucket] @@ -66,7 +66,7 @@ Set the tags for an Amazon S3 bucket To set the tags for an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3PutBucketTaggingOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_put_bucket_tagging] @@ -80,7 +80,7 @@ Get the tag of an Amazon S3 bucket To get the tag set associated with an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3GetBucketTaggingOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_get_bucket_tagging] @@ -94,7 +94,7 @@ Delete the tags of an Amazon S3 bucket To delete the tags of an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3DeleteBucketTaggingOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_delete_bucket_tagging] @@ -108,7 +108,7 @@ Create an Amazon S3 object To create a new (or replace) Amazon S3 object you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3CreateObjectOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_create_object] @@ -123,7 +123,7 @@ To copy an Amazon S3 object from one bucket to another you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3CopyObjectOperator`. The Amazon S3 connection used here needs to have access to both source and destination bucket/key. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_copy_object] @@ -137,7 +137,7 @@ Delete Amazon S3 objects To delete one or multiple Amazon S3 objects you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3DeleteObjectsOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_delete_objects] @@ -153,7 +153,7 @@ To transform the data from one Amazon S3 object and save it to another object yo You can also apply an optional `Amazon S3 Select expression `_ to select the data you want to retrieve from ``source_s3_key`` using ``select_expression``. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_file_transform] @@ -169,7 +169,7 @@ To list all Amazon S3 prefixes within an Amazon S3 bucket you can use See `here `__ for more information about Amazon S3 prefixes. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_list_prefixes] @@ -184,7 +184,7 @@ To list all Amazon S3 objects within an Amazon S3 bucket you can use :class:`~airflow.providers.amazon.aws.operators.s3.S3ListOperator`. You can specify a ``prefix`` to filter the objects whose name begins with such prefix. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_operator_s3_list] @@ -208,7 +208,7 @@ Please keep in mind, especially when used to check a large volume of keys, that To check one file: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_single_key] @@ -216,7 +216,7 @@ To check one file: To check multiple files: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_multiple_keys] @@ -224,7 +224,7 @@ To check multiple files: To check a file with regular expression: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_regex] @@ -244,13 +244,13 @@ multiple files can match one key. The list of matched S3 object attributes conta [{"Size": int}] -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_function_definition] :end-before: [END howto_sensor_s3_key_function_definition] -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_function] @@ -262,7 +262,7 @@ the triggerer asynchronously. Note that this will need triggerer to be available To check one file: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_single_key_deferrable] @@ -270,7 +270,7 @@ To check one file: To check multiple files: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_multiple_keys_deferrable] @@ -278,7 +278,7 @@ To check multiple files: To check a file with regular expression: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_key_regex_deferrable] @@ -295,7 +295,7 @@ the inactivity period has passed with no increase in the number of objects you c Note, this sensor will not behave correctly in reschedule mode, as the state of the listed objects in the Amazon S3 bucket will be lost between rescheduled invocations. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3.py :language: python :dedent: 4 :start-after: [START howto_sensor_s3_keys_unchanged] diff --git a/docs/apache-airflow-providers-amazon/operators/sagemaker.rst b/docs/apache-airflow-providers-amazon/operators/sagemaker.rst index c77b689693b03..c2f433267c306 100644 --- a/docs/apache-airflow-providers-amazon/operators/sagemaker.rst +++ b/docs/apache-airflow-providers-amazon/operators/sagemaker.rst @@ -42,7 +42,7 @@ Create an Amazon SageMaker processing job To create an Amazon Sagemaker processing job to sanitize your dataset you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerProcessingOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_processing] @@ -56,7 +56,7 @@ Create an Amazon SageMaker training job To create an Amazon Sagemaker training job you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerTrainingOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_training] @@ -70,7 +70,7 @@ Create an Amazon SageMaker model To create an Amazon Sagemaker model you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerModelOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_model] @@ -84,7 +84,7 @@ Start a hyperparameter tuning job To start a hyperparameter tuning job for an Amazon Sagemaker model you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerTuningOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_tuning] @@ -98,7 +98,7 @@ Delete an Amazon SageMaker model To delete an Amazon Sagemaker model you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerDeleteModelOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_delete_model] @@ -112,7 +112,7 @@ Create an Amazon SageMaker transform job To create an Amazon Sagemaker transform job you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerTransformOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_transform] @@ -126,7 +126,7 @@ Create an Amazon SageMaker endpoint config job To create an Amazon Sagemaker endpoint config job you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerEndpointConfigOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker_endpoint.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_endpoint.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_endpoint_config] @@ -140,7 +140,7 @@ Create an Amazon SageMaker endpoint job To create an Amazon Sagemaker endpoint you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerEndpointOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker_endpoint.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_endpoint.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_endpoint] @@ -154,7 +154,7 @@ Start an Amazon SageMaker pipeline execution To trigger an execution run for an already-defined Amazon Sagemaker pipeline, you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerStartPipelineOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker_pipeline.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_pipeline.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_start_pipeline] @@ -168,7 +168,7 @@ Stop an Amazon SageMaker pipeline execution To stop an Amazon Sagemaker pipeline execution that is currently running, you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerStopPipelineOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker_pipeline.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_pipeline.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_stop_pipeline] @@ -186,7 +186,7 @@ It consists of an inference specification that defines the inference image to us A model package group is a collection of model packages. You can use this operator to add a new version and model package to the group for every DAG run. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_register] @@ -202,7 +202,7 @@ An AutoML experiment will take some input data in CSV and the column it should l and train models on it without needing human supervision. The output is placed in an S3 bucket, and automatically deployed if configured for it. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_auto_ml] @@ -216,7 +216,7 @@ Create an Experiment for later use To create a SageMaker experiment, you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerCreateExperimentOperator`. This creates an experiment so that it's ready to be associated with processing, training and transform jobs. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_experiment] @@ -230,7 +230,7 @@ Create a SageMaker Notebook Instance To create a SageMaker Notebook Instance, you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerCreateNotebookOperator`. This creates a SageMaker Notebook Instance ready to run Jupyter notebooks. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker_notebook.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_notebook.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_notebook_create] @@ -244,7 +244,7 @@ Stop a SageMaker Notebook Instance To terminate a SageMaker Notebook Instance, you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerStopNotebookOperator`. 
This terminates the ML compute instance and disconnects the ML storage volume. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker_notebook.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_notebook.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_notebook_stop] @@ -258,7 +258,7 @@ Start a SageMaker Notebook Instance To launch a SageMaker Notebook Instance and re-attach an ML storage volume, you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerStartNotebookOperator`. This launches a new ML compute instance with the latest version of the libraries and attaches your ML storage volume. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker_notebook.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_notebook.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_notebook_start] @@ -273,7 +273,7 @@ Delete a SageMaker Notebook Instance To delete a SageMaker Notebook Instance, you can use :class:`~airflow.providers.amazon.aws.operators.sagemaker.SageMakerDeleteNotebookOperator`. This terminates the instance and deletes the ML storage volume and network interface associated with the instance. The instance must be stopped before it can be deleted. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker_notebook.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_notebook.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_notebook_delete] @@ -290,7 +290,7 @@ Wait on an Amazon SageMaker training job state To check the state of an Amazon Sagemaker training job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.sagemaker.SageMakerTrainingSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_sensor_sagemaker_training] @@ -304,7 +304,7 @@ Wait on an Amazon SageMaker transform job state To check the state of an Amazon Sagemaker transform job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.sagemaker.SageMakerTransformSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_sensor_sagemaker_transform] @@ -318,7 +318,7 @@ Wait on an Amazon SageMaker tuning job state To check the state of an Amazon Sagemaker hyperparameter tuning job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.sagemaker.SageMakerTuningSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_sensor_sagemaker_tuning] @@ -332,7 +332,7 @@ Wait on an Amazon SageMaker endpoint state To check the state of an Amazon Sagemaker endpoint until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.sagemaker.SageMakerEndpointSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker_endpoint.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_endpoint.py :language: python :dedent: 4 :start-after: [START howto_sensor_sagemaker_endpoint] @@ -346,7 +346,7 @@ Wait on an Amazon SageMaker pipeline execution state To check the state of an Amazon Sagemaker pipeline execution until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.sagemaker.SageMakerPipelineSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker_pipeline.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker_pipeline.py :language: python :dedent: 4 :start-after: [START howto_sensor_sagemaker_pipeline] @@ -360,7 +360,7 @@ Wait on an Amazon SageMaker AutoML experiment state To check the state of an Amazon Sagemaker AutoML job until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.sagemaker.SageMakerAutoMLSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sagemaker.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sagemaker.py :language: python :dedent: 4 :start-after: [START howto_operator_sagemaker_auto_ml] diff --git a/docs/apache-airflow-providers-amazon/operators/sns.rst b/docs/apache-airflow-providers-amazon/operators/sns.rst index e589e38f89b07..f4eb699e52801 100644 --- a/docs/apache-airflow-providers-amazon/operators/sns.rst +++ b/docs/apache-airflow-providers-amazon/operators/sns.rst @@ -48,7 +48,7 @@ Publish a message to an existing SNS topic To publish a message to an Amazon SNS Topic you can use :class:`~airflow.providers.amazon.aws.operators.sns.SnsPublishOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sns.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sns.py :language: python :dedent: 4 :start-after: [START howto_operator_sns_publish_operator] diff --git a/docs/apache-airflow-providers-amazon/operators/sqs.rst b/docs/apache-airflow-providers-amazon/operators/sqs.rst index 77fb38e2d8567..3eb0087079c38 100644 --- a/docs/apache-airflow-providers-amazon/operators/sqs.rst +++ b/docs/apache-airflow-providers-amazon/operators/sqs.rst @@ -50,7 +50,7 @@ To publish a message to an Amazon SQS queue you can use the In the following example, the task ``publish_to_queue`` publishes a message containing the task instance and the execution date to a queue with a default name of ``Airflow-Example-Queue``. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sqs.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sqs.py :language: python :dedent: 4 :start-after: [START howto_operator_sqs] @@ -68,7 +68,7 @@ To read messages from an Amazon SQS queue until exhausted use the :class:`~airflow.providers.amazon.aws.sensors.sqs.SqsSensor` This sensor can also be run in deferrable mode by setting ``deferrable`` param to ``True``. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sqs.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_sqs.py :language: python :dedent: 4 :start-after: [START howto_sensor_sqs] diff --git a/docs/apache-airflow-providers-amazon/operators/step_functions.rst b/docs/apache-airflow-providers-amazon/operators/step_functions.rst index 5ab5d19e68290..a83eafa6ae7ad 100644 --- a/docs/apache-airflow-providers-amazon/operators/step_functions.rst +++ b/docs/apache-airflow-providers-amazon/operators/step_functions.rst @@ -45,7 +45,7 @@ To start a new AWS Step Functions state machine execution you can use :class:`~airflow.providers.amazon.aws.operators.step_function.StepFunctionStartExecutionOperator`. You can also run this operator in deferrable mode by setting ``deferrable`` param to ``True``. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_step_functions.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_step_functions.py :language: python :dedent: 4 :start-after: [START howto_operator_step_function_start_execution] @@ -59,7 +59,7 @@ Get an AWS Step Functions execution output To fetch the output from an AWS Step Function state machine execution you can use :class:`~airflow.providers.amazon.aws.operators.step_function.StepFunctionGetExecutionOutputOperator`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_step_functions.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_step_functions.py :language: python :dedent: 4 :start-after: [START howto_operator_step_function_get_execution_output] @@ -76,7 +76,7 @@ Wait on an AWS Step Functions state machine execution state To wait on the state of an AWS Step Function state machine execution until it reaches a terminal state you can use :class:`~airflow.providers.amazon.aws.sensors.step_function.StepFunctionExecutionSensor`. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_step_functions.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_step_functions.py :language: python :dedent: 4 :start-after: [START howto_sensor_step_function_execution] diff --git a/docs/apache-airflow-providers-amazon/transfer/azure_blob_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/azure_blob_to_s3.rst index a3b9df5eb1ee5..cf68049f95165 100644 --- a/docs/apache-airflow-providers-amazon/transfer/azure_blob_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/azure_blob_to_s3.rst @@ -39,7 +39,7 @@ To copy data from an Azure Blob Storage container to an Amazon S3 bucket you can Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_azure_blob_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_azure_blob_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_azure_blob_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/dynamodb_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/dynamodb_to_s3.rst index 74c4b78cef497..c8d18f43c3c2e 100644 --- a/docs/apache-airflow-providers-amazon/transfer/dynamodb_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/dynamodb_to_s3.rst @@ -48,7 +48,7 @@ To get more information visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dynamodb_to_s3.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_dynamodb_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_dynamodb_to_s3] @@ -57,7 +57,7 @@ Example usage: To parallelize the replication, users can create multiple ``DynamoDBToS3Operator`` tasks using the ``TotalSegments`` parameter. For instance to replicate with parallelism of 2, create two tasks: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dynamodb_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dynamodb_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_dynamodb_to_s3_segmented] @@ -67,7 +67,7 @@ Users can also pass in ``point_in_time_export`` boolean param to ``DynamoDBToS3O Full export example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dynamodb_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dynamodb_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_dynamodb_to_s3_in_some_point_in_time_full_export] @@ -75,7 +75,7 @@ Full export example usage: Incremental export example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_dynamodb_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_dynamodb_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_dynamodb_to_s3_in_some_point_in_time_incremental_export] diff --git a/docs/apache-airflow-providers-amazon/transfer/ftp_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/ftp_to_s3.rst index 0f1904208538c..13166a753232e 100644 --- a/docs/apache-airflow-providers-amazon/transfer/ftp_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/ftp_to_s3.rst @@ -41,7 +41,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_ftp_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_ftp_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_ftp_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/gcs_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/gcs_to_s3.rst index 0f44f477016b3..eb032d813c1c1 100644 --- a/docs/apache-airflow-providers-amazon/transfer/gcs_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/gcs_to_s3.rst @@ -39,7 +39,7 @@ To copy data from a Google Cloud Storage bucket to an Amazon S3 bucket you can u Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_gcs_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_gcs_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_gcs_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/glacier_to_gcs.rst b/docs/apache-airflow-providers-amazon/transfer/glacier_to_gcs.rst index 33ffa8033e2f0..775fda6491c4f 100644 --- a/docs/apache-airflow-providers-amazon/transfer/glacier_to_gcs.rst +++ b/docs/apache-airflow-providers-amazon/transfer/glacier_to_gcs.rst @@ -38,7 +38,7 @@ Amazon S3 Glacier To GCS transfer operator To transfer data from an Amazon Glacier vault to Google Cloud Storage you can use :class:`~airflow.providers.amazon.aws.transfers.glacier_to_gcs.GlacierToGCSOperator` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_glacier_to_gcs.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_glacier_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_transfer_glacier_to_gcs] diff --git a/docs/apache-airflow-providers-amazon/transfer/google_api_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/google_api_to_s3.rst index b5593f9232b1a..a1d2c8ea99bfb 100644 --- a/docs/apache-airflow-providers-amazon/transfer/google_api_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/google_api_to_s3.rst @@ -38,7 +38,7 @@ Google Sheets to Amazon S3 transfer operator This example loads data from Google Sheets and saves it to an Amazon S3 file. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_google_api_sheets_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_google_api_sheets_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_google_api_sheets_to_s3] @@ -57,7 +57,7 @@ It searches for up to 50 videos (due to pagination) in a given time range (``YOUTUBE_VIDEO_PUBLISHED_AFTER``, ``YOUTUBE_VIDEO_PUBLISHED_BEFORE``) on a YouTube channel (``YOUTUBE_CHANNEL_ID``), saves the response in Amazon S3 and also pushes the data to xcom. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_google_api_youtube_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_google_api_youtube_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_google_api_youtube_search_to_s3] @@ -66,7 +66,7 @@ saves the response in Amazon S3 and also pushes the data to xcom. It passes over the YouTube IDs to the next request which then gets the information (``YOUTUBE_VIDEO_FIELDS``) for the requested videos and saves them in Amazon S3 (``S3_BUCKET_NAME``). -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_google_api_youtube_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_google_api_youtube_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_google_api_youtube_list_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/hive_to_dynamodb.rst b/docs/apache-airflow-providers-amazon/transfer/hive_to_dynamodb.rst index 733527e1acf54..a13cf012b4b97 100644 --- a/docs/apache-airflow-providers-amazon/transfer/hive_to_dynamodb.rst +++ b/docs/apache-airflow-providers-amazon/transfer/hive_to_dynamodb.rst @@ -44,7 +44,7 @@ To get more information visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_hive_to_dynamodb.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_hive_to_dynamodb.py :language: python :dedent: 4 :start-after: [START howto_transfer_hive_to_dynamodb] diff --git a/docs/apache-airflow-providers-amazon/transfer/http_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/http_to_s3.rst index d40e66f861b2b..d28c4508aae1e 100644 --- a/docs/apache-airflow-providers-amazon/transfer/http_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/http_to_s3.rst @@ -41,7 +41,7 @@ To get more information about this operator visit: Example usage: -.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_http_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_http_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/imap_attachment_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/imap_attachment_to_s3.rst index 5787587eb0181..cb6f46d338eff 100644 --- a/docs/apache-airflow-providers-amazon/transfer/imap_attachment_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/imap_attachment_to_s3.rst @@ -38,7 +38,7 @@ Imap Attachment To Amazon S3 transfer operator To save an email attachment via IMAP protocol from an email server to an Amazon S3 Bucket you can use :class:`~airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.ImapAttachmentToS3Operator` -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_imap_attachment_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_imap_attachment_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_imap_attachment_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/local_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/local_to_s3.rst index 403db3ea9a826..d1ca167844464 100644 --- a/docs/apache-airflow-providers-amazon/transfer/local_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/local_to_s3.rst @@ -42,7 +42,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_local_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_local_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_local_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/mongo_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/mongo_to_s3.rst index 8ad30e5830710..a9d8a441e670e 100644 --- a/docs/apache-airflow-providers-amazon/transfer/mongo_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/mongo_to_s3.rst @@ -43,7 +43,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_mongo_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_mongo_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_mongo_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/redshift_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/redshift_to_s3.rst index 980d44d48b756..f2b32d35c52d4 100644 --- a/docs/apache-airflow-providers-amazon/transfer/redshift_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/redshift_to_s3.rst @@ -42,7 +42,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift_s3_transfers.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift_s3_transfers.py :language: python :dedent: 4 :start-after: [START howto_transfer_redshift_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/s3_to_dynamodb.rst b/docs/apache-airflow-providers-amazon/transfer/s3_to_dynamodb.rst index 630863547d7a4..002d47728c24c 100644 --- a/docs/apache-airflow-providers-amazon/transfer/s3_to_dynamodb.rst +++ b/docs/apache-airflow-providers-amazon/transfer/s3_to_dynamodb.rst @@ -48,7 +48,7 @@ To get more information visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3_to_dynamodb.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3_to_dynamodb.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_dynamodb] @@ -57,7 +57,7 @@ Example usage: To load S3 data into an existing DynamoDB table use: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3_to_dynamodb.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3_to_dynamodb.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_dynamodb_existing_table] diff --git a/docs/apache-airflow-providers-amazon/transfer/s3_to_ftp.rst b/docs/apache-airflow-providers-amazon/transfer/s3_to_ftp.rst index 3db4f321b2120..ecb33de0aa5f2 100644 --- a/docs/apache-airflow-providers-amazon/transfer/s3_to_ftp.rst +++ b/docs/apache-airflow-providers-amazon/transfer/s3_to_ftp.rst @@ -42,7 +42,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3_to_ftp.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3_to_ftp.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_ftp] diff --git a/docs/apache-airflow-providers-amazon/transfer/s3_to_redshift.rst b/docs/apache-airflow-providers-amazon/transfer/s3_to_redshift.rst index 431973653e1a4..bcef5b272ac7a 100644 --- a/docs/apache-airflow-providers-amazon/transfer/s3_to_redshift.rst +++ b/docs/apache-airflow-providers-amazon/transfer/s3_to_redshift.rst @@ -42,7 +42,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift_s3_transfers.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift_s3_transfers.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_redshift] @@ -50,7 +50,7 @@ Example usage: Example of ingesting multiple keys: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_redshift_s3_transfers.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_redshift_s3_transfers.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_redshift_multiple_keys] diff --git a/docs/apache-airflow-providers-amazon/transfer/s3_to_sftp.rst b/docs/apache-airflow-providers-amazon/transfer/s3_to_sftp.rst index 9353721fa5099..56391b634c769 100644 --- a/docs/apache-airflow-providers-amazon/transfer/s3_to_sftp.rst +++ b/docs/apache-airflow-providers-amazon/transfer/s3_to_sftp.rst @@ -43,7 +43,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3_to_sftp.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3_to_sftp.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_sftp] diff --git a/docs/apache-airflow-providers-amazon/transfer/s3_to_sql.rst b/docs/apache-airflow-providers-amazon/transfer/s3_to_sql.rst index 590b625413cdd..90899d6a399d3 100644 --- a/docs/apache-airflow-providers-amazon/transfer/s3_to_sql.rst +++ b/docs/apache-airflow-providers-amazon/transfer/s3_to_sql.rst @@ -43,7 +43,7 @@ To get more information about this operator visit: Example usage with a parser for a csv file. This parser loads the file into memory and returns a list of rows: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3_to_sql.py +.. 
exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3_to_sql.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_sql] @@ -52,7 +52,7 @@ file into memory and returns a list of rows: Example usage with a parser function that returns a generator. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_s3_to_sql.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_s3_to_sql.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_sql_generator] diff --git a/docs/apache-airflow-providers-amazon/transfer/salesforce_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/salesforce_to_s3.rst index 7e08226d39217..b0a41c16699b0 100644 --- a/docs/apache-airflow-providers-amazon/transfer/salesforce_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/salesforce_to_s3.rst @@ -38,7 +38,7 @@ Extract data from Salesforce to Amazon S3 transfer operator The following example demonstrates a use case of extracting account data from a Salesforce instance and uploading it to an Amazon S3 bucket. -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_salesforce_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_salesforce_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_salesforce_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/sftp_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/sftp_to_s3.rst index 0f8dfd79ed4b6..821bf211097b4 100644 --- a/docs/apache-airflow-providers-amazon/transfer/sftp_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/sftp_to_s3.rst @@ -42,7 +42,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sftp_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sftp_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_sftp_to_s3] diff --git a/docs/apache-airflow-providers-amazon/transfer/sql_to_s3.rst b/docs/apache-airflow-providers-amazon/transfer/sql_to_s3.rst index e01d415942934..5e088f3a12759 100644 --- a/docs/apache-airflow-providers-amazon/transfer/sql_to_s3.rst +++ b/docs/apache-airflow-providers-amazon/transfer/sql_to_s3.rst @@ -44,7 +44,7 @@ To get more information about this operator visit: Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sql_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sql_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_sql_to_s3] @@ -57,7 +57,7 @@ We can group the data in the table by passing the ``groupby_kwargs`` param. This Example usage: -.. exampleinclude:: /../../tests/system/providers/amazon/aws/example_sql_to_s3.py +.. exampleinclude:: /../../providers/tests/system/amazon/aws/example_sql_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_sql_to_s3_with_groupby_param] diff --git a/docs/apache-airflow-providers-apache-beam/changelog.rst b/docs/apache-airflow-providers-apache-beam/changelog.rst index 4c423b292bd9e..7728c11ed6a62 100644 --- a/docs/apache-airflow-providers-apache-beam/changelog.rst +++ b/docs/apache-airflow-providers-apache-beam/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. 
include:: ../../providers/src/airflow/providers/apache/beam/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-beam/index.rst b/docs/apache-airflow-providers-apache-beam/index.rst index a8b7396b6706e..7b311c10fcf4a 100644 --- a/docs/apache-airflow-providers-apache-beam/index.rst +++ b/docs/apache-airflow-providers-apache-beam/index.rst @@ -40,7 +40,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/beam/index> + System Tests <_api/tests/system/apache/beam/index> .. toctree:: :hidden: @@ -48,7 +48,7 @@ :caption: Resources PyPI Repository - Example DAGs + Example DAGs .. toctree:: :hidden: diff --git a/docs/apache-airflow-providers-apache-beam/operators.rst b/docs/apache-airflow-providers-apache-beam/operators.rst index b536c514f7851..da4e696d2ba73 100644 --- a/docs/apache-airflow-providers-apache-beam/operators.rst +++ b/docs/apache-airflow-providers-apache-beam/operators.rst @@ -54,13 +54,13 @@ recommend avoiding unless the Dataflow job requires it. Python Pipelines with DirectRunner ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_python.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_python.py :language: python :dedent: 4 :start-after: [START howto_operator_start_python_direct_runner_pipeline_local_file] :end-before: [END howto_operator_start_python_direct_runner_pipeline_local_file] -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_python.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_python.py :language: python :dedent: 4 :start-after: [START howto_operator_start_python_direct_runner_pipeline_gcs_file] @@ -71,13 +71,13 @@ possibility to free up the worker when it knows it has to wait, and hand off the As a result, while it is suspended (deferred), it is not taking up a worker slot and your cluster will have a lot less resources wasted on idle Operators or Sensors: -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_python_async.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_python_async.py :language: python :dedent: 4 :start-after: [START howto_operator_start_python_direct_runner_pipeline_local_file_async] :end-before: [END howto_operator_start_python_direct_runner_pipeline_local_file_async] -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_python_async.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_python_async.py :language: python :dedent: 4 :start-after: [START howto_operator_start_python_direct_runner_pipeline_gcs_file_async] @@ -86,13 +86,13 @@ lot less resources wasted on idle Operators or Sensors: Python Pipelines with DataflowRunner ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_python.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_python.py :language: python :dedent: 4 :start-after: [START howto_operator_start_python_dataflow_runner_pipeline_gcs_file] :end-before: [END howto_operator_start_python_dataflow_runner_pipeline_gcs_file] -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_python_dataflow.py +.. 
exampleinclude:: /../../providers/tests/system/apache/beam/example_python_dataflow.py :language: python :dedent: 4 :start-after: [START howto_operator_start_python_dataflow_runner_pipeline_async_gcs_file] @@ -104,7 +104,7 @@ possibility to free up the worker when it knows it has to wait, and hand off the As a result, while it is suspended (deferred), it is not taking up a worker slot and your cluster will have a lot less resources wasted on idle Operators or Sensors: -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_python_async.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_python_async.py :language: python :dedent: 4 :start-after: [START howto_operator_start_python_dataflow_runner_pipeline_gcs_file_async] @@ -126,7 +126,7 @@ has the ability to download or available on the local filesystem (provide the ab Java Pipelines with DirectRunner ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_beam.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_beam.py :language: python :dedent: 4 :start-after: [START howto_operator_start_java_direct_runner_pipeline] @@ -135,7 +135,7 @@ Java Pipelines with DirectRunner Java Pipelines with DataflowRunner ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_java_dataflow.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_java_dataflow.py :language: python :dedent: 4 :start-after: [START howto_operator_start_java_dataflow_runner_pipeline] @@ -159,13 +159,13 @@ init the module and install dependencies with ``go run init example.com/main`` a Go Pipelines with DirectRunner ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_go.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_go.py :language: python :dedent: 4 :start-after: [START howto_operator_start_go_direct_runner_pipeline_local_file] :end-before: [END howto_operator_start_go_direct_runner_pipeline_local_file] -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_go.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_go.py :language: python :dedent: 4 :start-after: [START howto_operator_start_go_direct_runner_pipeline_gcs_file] @@ -174,13 +174,13 @@ Go Pipelines with DirectRunner Go Pipelines with DataflowRunner ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_go.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_go.py :language: python :dedent: 4 :start-after: [START howto_operator_start_go_dataflow_runner_pipeline_gcs_file] :end-before: [END howto_operator_start_go_dataflow_runner_pipeline_gcs_file] -.. exampleinclude:: /../../tests/system/providers/apache/beam/example_go_dataflow.py +.. exampleinclude:: /../../providers/tests/system/apache/beam/example_go_dataflow.py :language: python :dedent: 4 :start-after: [START howto_operator_start_go_dataflow_runner_pipeline_async_gcs_file] diff --git a/docs/apache-airflow-providers-apache-cassandra/changelog.rst b/docs/apache-airflow-providers-apache-cassandra/changelog.rst index 3fce287c1c072..2119d2c002672 100644 --- a/docs/apache-airflow-providers-apache-cassandra/changelog.rst +++ b/docs/apache-airflow-providers-apache-cassandra/changelog.rst @@ -22,4 +22,4 @@ .. 
IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/cassandra/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/cassandra/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-cassandra/index.rst b/docs/apache-airflow-providers-apache-cassandra/index.rst index 0ca1a4f138045..0a3e52999e40b 100644 --- a/docs/apache-airflow-providers-apache-cassandra/index.rst +++ b/docs/apache-airflow-providers-apache-cassandra/index.rst @@ -48,14 +48,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/cassandra/index> + System Tests <_api/tests/system/apache/cassandra/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-apache-cassandra/operators.rst b/docs/apache-airflow-providers-apache-cassandra/operators.rst index e0a1e10b559a7..24bc51ef9027d 100644 --- a/docs/apache-airflow-providers-apache-cassandra/operators.rst +++ b/docs/apache-airflow-providers-apache-cassandra/operators.rst @@ -50,7 +50,7 @@ Use the ``keys`` parameter to poke until the provided record is found. The exist Example use of these sensors ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. exampleinclude:: /../../tests/system/providers/apache/cassandra/example_cassandra_dag.py +.. exampleinclude:: /../../providers/tests/system/apache/cassandra/example_cassandra_dag.py :language: python :start-after: [START howto_operator_cassandra_sensors] :end-before: [END howto_operator_cassandra_sensors] diff --git a/docs/apache-airflow-providers-apache-drill/changelog.rst b/docs/apache-airflow-providers-apache-drill/changelog.rst index f2795b435a72b..79971613d2f63 100644 --- a/docs/apache-airflow-providers-apache-drill/changelog.rst +++ b/docs/apache-airflow-providers-apache-drill/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/drill/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/drill/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-drill/index.rst b/docs/apache-airflow-providers-apache-drill/index.rst index cf05944534b7c..8ba9ef37a6672 100644 --- a/docs/apache-airflow-providers-apache-drill/index.rst +++ b/docs/apache-airflow-providers-apache-drill/index.rst @@ -48,14 +48,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/drill/index> + System Tests <_api/tests/system/apache/drill/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-apache-drill/operators.rst b/docs/apache-airflow-providers-apache-drill/operators.rst index 7b647c0a2efe4..47784b67fedc3 100644 --- a/docs/apache-airflow-providers-apache-drill/operators.rst +++ b/docs/apache-airflow-providers-apache-drill/operators.rst @@ -39,7 +39,7 @@ The ``sql`` parameter can be templated and be an external ``.sql`` file. Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/apache/drill/example_drill_dag.py +.. 
exampleinclude:: /../../providers/tests/system/apache/drill/example_drill_dag.py :language: python :dedent: 4 :start-after: [START howto_operator_drill] diff --git a/docs/apache-airflow-providers-apache-druid/changelog.rst b/docs/apache-airflow-providers-apache-druid/changelog.rst index f1d8377ab560f..652948c8ee8c2 100644 --- a/docs/apache-airflow-providers-apache-druid/changelog.rst +++ b/docs/apache-airflow-providers-apache-druid/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/druid/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/druid/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-druid/index.rst b/docs/apache-airflow-providers-apache-druid/index.rst index a351b7a40e353..0b920743839c3 100644 --- a/docs/apache-airflow-providers-apache-druid/index.rst +++ b/docs/apache-airflow-providers-apache-druid/index.rst @@ -47,7 +47,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/druid/index> + System Tests <_api/tests/system/apache/druid/index> .. toctree:: @@ -57,7 +57,7 @@ PyPI Repository Installing from sources - Example DAGs + Example DAGs .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-apache-druid/operators.rst b/docs/apache-airflow-providers-apache-druid/operators.rst index 6930e7b4d3ae3..758c51c538538 100644 --- a/docs/apache-airflow-providers-apache-druid/operators.rst +++ b/docs/apache-airflow-providers-apache-druid/operators.rst @@ -38,7 +38,7 @@ For parameter definition take a look at :class:`~airflow.providers.apache.druid. Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/apache/druid/example_druid_dag.py +.. exampleinclude:: /../../providers/tests/system/apache/druid/example_druid_dag.py :language: python :dedent: 4 :start-after: [START howto_operator_druid_submit] diff --git a/docs/apache-airflow-providers-apache-flink/changelog.rst b/docs/apache-airflow-providers-apache-flink/changelog.rst index c6c5d10cb7eb4..07ffea0939e6f 100644 --- a/docs/apache-airflow-providers-apache-flink/changelog.rst +++ b/docs/apache-airflow-providers-apache-flink/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/flink/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/flink/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-flink/index.rst b/docs/apache-airflow-providers-apache-flink/index.rst index 09ede32f377f5..a909ecfb74006 100644 --- a/docs/apache-airflow-providers-apache-flink/index.rst +++ b/docs/apache-airflow-providers-apache-flink/index.rst @@ -47,7 +47,7 @@ :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-apache-hdfs/changelog.rst b/docs/apache-airflow-providers-apache-hdfs/changelog.rst index 5504c69287833..3c984d0e1129a 100644 --- a/docs/apache-airflow-providers-apache-hdfs/changelog.rst +++ b/docs/apache-airflow-providers-apache-hdfs/changelog.rst @@ -22,4 +22,4 @@ .. 
IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/hdfs/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/hdfs/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-hive/changelog.rst b/docs/apache-airflow-providers-apache-hive/changelog.rst index 59bdc59e2d44f..838d1ce4de318 100644 --- a/docs/apache-airflow-providers-apache-hive/changelog.rst +++ b/docs/apache-airflow-providers-apache-hive/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/hive/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/hive/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-hive/index.rst b/docs/apache-airflow-providers-apache-hive/index.rst index f99065fba4aa9..7a78a85eff547 100644 --- a/docs/apache-airflow-providers-apache-hive/index.rst +++ b/docs/apache-airflow-providers-apache-hive/index.rst @@ -50,14 +50,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/hive/index> + System Tests <_api/tests/system/apache/hive/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources Macros diff --git a/docs/apache-airflow-providers-apache-hive/operators.rst b/docs/apache-airflow-providers-apache-hive/operators.rst index 7a92cba9f2dd1..d6d65a893a9dd 100644 --- a/docs/apache-airflow-providers-apache-hive/operators.rst +++ b/docs/apache-airflow-providers-apache-hive/operators.rst @@ -27,7 +27,7 @@ HiveOperator This operator executes hql code or hive script in a specific Hive database. -.. exampleinclude:: /../../tests/system/providers/apache/hive/example_twitter_dag.py +.. exampleinclude:: /../../providers/tests/system/apache/hive/example_twitter_dag.py :language: python :dedent: 4 :start-after: [START create_hive] diff --git a/docs/apache-airflow-providers-apache-iceberg/changelog.rst b/docs/apache-airflow-providers-apache-iceberg/changelog.rst index daefe9612ee68..220a77a762b68 100644 --- a/docs/apache-airflow-providers-apache-iceberg/changelog.rst +++ b/docs/apache-airflow-providers-apache-iceberg/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/iceberg/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/iceberg/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-iceberg/index.rst b/docs/apache-airflow-providers-apache-iceberg/index.rst index da33da6e4dd39..6bcfd228a8a84 100644 --- a/docs/apache-airflow-providers-apache-iceberg/index.rst +++ b/docs/apache-airflow-providers-apache-iceberg/index.rst @@ -41,7 +41,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/iceberg/index> + System Tests <_api/tests/system/apache/iceberg/index> .. 
toctree:: @@ -49,7 +49,7 @@ :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources Python API <_api/airflow/providers/apache/iceberg/index> diff --git a/docs/apache-airflow-providers-apache-impala/changelog.rst b/docs/apache-airflow-providers-apache-impala/changelog.rst index 8f7e22d898972..ad7e0972ce927 100644 --- a/docs/apache-airflow-providers-apache-impala/changelog.rst +++ b/docs/apache-airflow-providers-apache-impala/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/impala/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/impala/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-kafka/changelog.rst b/docs/apache-airflow-providers-apache-kafka/changelog.rst index 258e2231535f1..62b89d1ca33e1 100644 --- a/docs/apache-airflow-providers-apache-kafka/changelog.rst +++ b/docs/apache-airflow-providers-apache-kafka/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/kafka/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/kafka/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-kafka/index.rst b/docs/apache-airflow-providers-apache-kafka/index.rst index 0879b57fb7f1b..9575d35490b52 100644 --- a/docs/apache-airflow-providers-apache-kafka/index.rst +++ b/docs/apache-airflow-providers-apache-kafka/index.rst @@ -53,7 +53,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/kafka/index> + System Tests <_api/tests/system/apache/kafka/index> .. toctree:: @@ -61,7 +61,7 @@ :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-apache-kafka/operators/index.rst b/docs/apache-airflow-providers-apache-kafka/operators/index.rst index da5e99b4b7ec1..47ae3d63615f1 100644 --- a/docs/apache-airflow-providers-apache-kafka/operators/index.rst +++ b/docs/apache-airflow-providers-apache-kafka/operators/index.rst @@ -33,7 +33,7 @@ For parameter definitions take a look at :class:`~airflow.providers.apache.kafka Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/apache/kafka/example_dag_hello_kafka.py +.. exampleinclude:: /../../providers/tests/system/apache/kafka/example_dag_hello_kafka.py :language: python :dedent: 4 :start-after: [START howto_operator_consume_from_topic] @@ -58,7 +58,7 @@ For parameter definitions take a look at :class:`~airflow.providers.apache.kafka Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/apache/kafka/example_dag_hello_kafka.py +.. exampleinclude:: /../../providers/tests/system/apache/kafka/example_dag_hello_kafka.py :language: python :dedent: 4 :start-after: [START howto_operator_produce_to_topic] diff --git a/docs/apache-airflow-providers-apache-kafka/sensors.rst b/docs/apache-airflow-providers-apache-kafka/sensors.rst index 4014154640fea..02fd89e5ed642 100644 --- a/docs/apache-airflow-providers-apache-kafka/sensors.rst +++ b/docs/apache-airflow-providers-apache-kafka/sensors.rst @@ -35,7 +35,7 @@ Using the sensor """""""""""""""""" -.. 
exampleinclude:: /../../tests/system/providers/apache/kafka/example_dag_hello_kafka.py +.. exampleinclude:: /../../providers/tests/system/apache/kafka/example_dag_hello_kafka.py :language: python :dedent: 4 :start-after: [START howto_sensor_await_message] @@ -62,7 +62,7 @@ For parameter definitions take a look at :class:`~airflow.providers.apache.kafka Using the sensor """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/apache/kafka/example_dag_event_listener.py +.. exampleinclude:: /../../providers/tests/system/apache/kafka/example_dag_event_listener.py :language: python :dedent: 4 :start-after: [START howto_sensor_await_message_trigger_function] diff --git a/docs/apache-airflow-providers-apache-kylin/changelog.rst b/docs/apache-airflow-providers-apache-kylin/changelog.rst index 2fbe89478c990..af326254ace9b 100644 --- a/docs/apache-airflow-providers-apache-kylin/changelog.rst +++ b/docs/apache-airflow-providers-apache-kylin/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/kylin/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/kylin/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-kylin/index.rst b/docs/apache-airflow-providers-apache-kylin/index.rst index f38208c5bab48..b496932307d0f 100644 --- a/docs/apache-airflow-providers-apache-kylin/index.rst +++ b/docs/apache-airflow-providers-apache-kylin/index.rst @@ -48,14 +48,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/kylin/index> + System Tests <_api/tests/system/apache/kylin/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-apache-livy/changelog.rst b/docs/apache-airflow-providers-apache-livy/changelog.rst index 5b0b68391eb12..cda7f99ad754e 100644 --- a/docs/apache-airflow-providers-apache-livy/changelog.rst +++ b/docs/apache-airflow-providers-apache-livy/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/livy/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/livy/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-livy/index.rst b/docs/apache-airflow-providers-apache-livy/index.rst index f662f0efdbfda..818766c0dd3ae 100644 --- a/docs/apache-airflow-providers-apache-livy/index.rst +++ b/docs/apache-airflow-providers-apache-livy/index.rst @@ -48,14 +48,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/livy/index> + System Tests <_api/tests/system/apache/livy/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-apache-livy/operators.rst b/docs/apache-airflow-providers-apache-livy/operators.rst index 5ef0e0b71a591..851f524c2a840 100644 --- a/docs/apache-airflow-providers-apache-livy/operators.rst +++ b/docs/apache-airflow-providers-apache-livy/operators.rst @@ -29,7 +29,7 @@ LivyOperator This operator wraps the Apache Livy batch REST API, allowing to submit a Spark application to the underlying cluster. -.. 
exampleinclude:: /../../tests/system/providers/apache/livy/example_livy.py +.. exampleinclude:: /../../providers/tests/system/apache/livy/example_livy.py :language: python :start-after: [START create_livy] :end-before: [END create_livy] @@ -38,7 +38,7 @@ You can also run this operator in deferrable mode by setting the parameter ``def This will lead to efficient utilization of Airflow workers as polling for job status happens on the triggerer asynchronously. Note that this will need triggerer to be available on your Airflow deployment. -.. exampleinclude:: /../../tests/system/providers/apache/livy/example_livy.py +.. exampleinclude:: /../../providers/tests/system/apache/livy/example_livy.py :language: python :start-after: [START create_livy_deferrable] :end-before: [END create_livy_deferrable] diff --git a/docs/apache-airflow-providers-apache-pig/changelog.rst b/docs/apache-airflow-providers-apache-pig/changelog.rst index d1b5cc0aa478a..e6c7d50b406bc 100644 --- a/docs/apache-airflow-providers-apache-pig/changelog.rst +++ b/docs/apache-airflow-providers-apache-pig/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/pig/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/pig/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-pig/index.rst b/docs/apache-airflow-providers-apache-pig/index.rst index 672c07de9a8b0..efab88e3f964c 100644 --- a/docs/apache-airflow-providers-apache-pig/index.rst +++ b/docs/apache-airflow-providers-apache-pig/index.rst @@ -47,14 +47,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/pig/index> + System Tests <_api/tests/system/apache/pig/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-apache-pig/operators.rst b/docs/apache-airflow-providers-apache-pig/operators.rst index 42be0eeac496a..601970051abc7 100644 --- a/docs/apache-airflow-providers-apache-pig/operators.rst +++ b/docs/apache-airflow-providers-apache-pig/operators.rst @@ -26,7 +26,7 @@ Pig programs are amenable to substantial parallelization, which in turns enables Use the :class:`~airflow.providers.apache.pig.operators.pig.PigOperator` to execute a pig script. -.. exampleinclude:: /../../tests/system/providers/apache/pig/example_pig.py +.. exampleinclude:: /../../providers/tests/system/apache/pig/example_pig.py :language: python :start-after: [START create_pig] :end-before: [END create_pig] diff --git a/docs/apache-airflow-providers-apache-pinot/changelog.rst b/docs/apache-airflow-providers-apache-pinot/changelog.rst index 82c9498bc3dd0..6ec0eddf207dc 100644 --- a/docs/apache-airflow-providers-apache-pinot/changelog.rst +++ b/docs/apache-airflow-providers-apache-pinot/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/pinot/CHANGELOG.rst +.. 
include:: ../../providers/src/airflow/providers/apache/pinot/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-pinot/index.rst b/docs/apache-airflow-providers-apache-pinot/index.rst index 32cf0f675af5c..4eb704a923416 100644 --- a/docs/apache-airflow-providers-apache-pinot/index.rst +++ b/docs/apache-airflow-providers-apache-pinot/index.rst @@ -40,7 +40,7 @@ :maxdepth: 1 :caption: References - Example DAGs + Example DAGs Python API <_api/airflow/providers/apache/pinot/index> PyPI Repository Installing from sources @@ -50,7 +50,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/pinot/index> + System Tests <_api/tests/system/apache/pinot/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-apache-pinot/operators.rst b/docs/apache-airflow-providers-apache-pinot/operators.rst index dba3b2a059590..0882d856ca1c3 100644 --- a/docs/apache-airflow-providers-apache-pinot/operators.rst +++ b/docs/apache-airflow-providers-apache-pinot/operators.rst @@ -40,7 +40,7 @@ Parameters For parameter definition, take a look at :class:`~airflow.providers.apache.pinot.hooks.pinot.PinotAdminHook` -.. exampleinclude:: /../../tests/system/providers/apache/pinot/example_pinot_dag.py +.. exampleinclude:: /../../providers/tests/system/apache/pinot/example_pinot_dag.py :language: python :dedent: 4 :start-after: [START howto_operator_pinot_admin_hook] @@ -61,7 +61,7 @@ Parameters For parameter definition, take a look at :class:`~airflow.providers.apache.pinot.hooks.pinot.PinotDbApiHook` -.. exampleinclude:: /../../tests/system/providers/apache/pinot/example_pinot_dag.py +.. exampleinclude:: /../../providers/tests/system/apache/pinot/example_pinot_dag.py :language: python :dedent: 4 :start-after: [START howto_operator_pinot_dbapi_example] diff --git a/docs/apache-airflow-providers-apache-spark/changelog.rst b/docs/apache-airflow-providers-apache-spark/changelog.rst index a21049f9e8386..7714d7d18e435 100644 --- a/docs/apache-airflow-providers-apache-spark/changelog.rst +++ b/docs/apache-airflow-providers-apache-spark/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apache/spark/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apache/spark/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-apache-spark/decorators/pyspark.rst b/docs/apache-airflow-providers-apache-spark/decorators/pyspark.rst index 1755e079b9214..ba61f31e9d993 100644 --- a/docs/apache-airflow-providers-apache-spark/decorators/pyspark.rst +++ b/docs/apache-airflow-providers-apache-spark/decorators/pyspark.rst @@ -44,7 +44,7 @@ Example The following example shows how to use the ``@task.pyspark`` decorator. Note that the ``spark`` and ``sc`` objects are injected into the function. -.. exampleinclude:: /../../tests/system/providers/apache/spark/example_pyspark.py +.. 
exampleinclude:: /../../providers/tests/system/apache/spark/example_pyspark.py :language: python :dedent: 4 :start-after: [START task_pyspark] diff --git a/docs/apache-airflow-providers-apache-spark/index.rst b/docs/apache-airflow-providers-apache-spark/index.rst index 6e36428a46291..4d96f64dfc935 100644 --- a/docs/apache-airflow-providers-apache-spark/index.rst +++ b/docs/apache-airflow-providers-apache-spark/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/apache/spark/index> + System Tests <_api/tests/system/apache/spark/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-apache-spark/operators.rst b/docs/apache-airflow-providers-apache-spark/operators.rst index f6c20985f24cf..63e728b57332e 100644 --- a/docs/apache-airflow-providers-apache-spark/operators.rst +++ b/docs/apache-airflow-providers-apache-spark/operators.rst @@ -44,7 +44,7 @@ Using the operator Using ``cmd_type`` parameter, is possible to transfer data from Spark to a database (``spark_to_jdbc``) or from a database to Spark (``jdbc_to_spark``), which will write the table using the Spark command ``saveAsTable``. -.. exampleinclude:: /../../tests/system/providers/apache/spark/example_spark_dag.py +.. exampleinclude:: /../../providers/tests/system/apache/spark/example_spark_dag.py :language: python :dedent: 4 :start-after: [START howto_operator_spark_jdbc] @@ -69,7 +69,7 @@ For parameter definition take a look at :class:`~airflow.providers.apache.spark. Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/apache/spark/example_spark_dag.py +.. exampleinclude:: /../../providers/tests/system/apache/spark/example_spark_dag.py :language: python :dedent: 4 :start-after: [START howto_operator_spark_sql] @@ -92,7 +92,7 @@ For parameter definition take a look at :class:`~airflow.providers.apache.spark. Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/apache/spark/example_spark_dag.py +.. exampleinclude:: /../../providers/tests/system/apache/spark/example_spark_dag.py :language: python :dedent: 4 :start-after: [START howto_operator_spark_submit] diff --git a/docs/apache-airflow-providers-apprise/changelog.rst b/docs/apache-airflow-providers-apprise/changelog.rst index 3634dd238a43b..202bf0b8520cf 100644 --- a/docs/apache-airflow-providers-apprise/changelog.rst +++ b/docs/apache-airflow-providers-apprise/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/apprise/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/apprise/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-arangodb/changelog.rst b/docs/apache-airflow-providers-arangodb/changelog.rst index c3ddda82157eb..c4229fd8a9c80 100644 --- a/docs/apache-airflow-providers-arangodb/changelog.rst +++ b/docs/apache-airflow-providers-arangodb/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/arangodb/CHANGELOG.rst +.. 
include:: ../../providers/src/airflow/providers/arangodb/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-arangodb/operators/index.rst b/docs/apache-airflow-providers-arangodb/operators/index.rst index 33b4e10064c48..186e000a984b5 100644 --- a/docs/apache-airflow-providers-arangodb/operators/index.rst +++ b/docs/apache-airflow-providers-arangodb/operators/index.rst @@ -32,7 +32,7 @@ Callable as you like. An example of Listing all Documents in **students** collection can be implemented as following: -.. exampleinclude:: /../../airflow/providers/arangodb/example_dags/example_arangodb.py +.. exampleinclude:: /../../providers/src/airflow/providers/arangodb/example_dags/example_arangodb.py :language: python :start-after: [START howto_aql_operator_arangodb] :end-before: [END howto_aql_operator_arangodb] @@ -40,7 +40,7 @@ An example of Listing all Documents in **students** collection can be implemente You can also provide file template (.sql) to load query, remember path is relative to **dags/** folder, if you want to provide any other path please provide **template_searchpath** while creating **DAG** object, -.. exampleinclude:: /../../airflow/providers/arangodb/example_dags/example_arangodb.py +.. exampleinclude:: /../../providers/src/airflow/providers/arangodb/example_dags/example_arangodb.py :language: python :start-after: [START howto_aql_operator_template_file_arangodb] :end-before: [END howto_aql_operator_template_file_arangodb] @@ -53,14 +53,14 @@ AQL query in `ArangoDB `__. An example for waiting a document in **students** collection with student name **judy** can be implemented as following: -.. exampleinclude:: /../../airflow/providers/arangodb/example_dags/example_arangodb.py +.. exampleinclude:: /../../providers/src/airflow/providers/arangodb/example_dags/example_arangodb.py :language: python :start-after: [START howto_aql_sensor_arangodb] :end-before: [END howto_aql_sensor_arangodb] Similar to **AQLOperator**, You can also provide file template to load query - -.. exampleinclude:: /../../airflow/providers/arangodb/example_dags/example_arangodb.py +.. exampleinclude:: /../../providers/src/airflow/providers/arangodb/example_dags/example_arangodb.py :language: python :start-after: [START howto_aql_sensor_template_file_arangodb] :end-before: [END howto_aql_sensor_template_file_arangodb] diff --git a/docs/apache-airflow-providers-asana/changelog.rst b/docs/apache-airflow-providers-asana/changelog.rst index 92a6d94d0f8d4..ae927055eb9f1 100644 --- a/docs/apache-airflow-providers-asana/changelog.rst +++ b/docs/apache-airflow-providers-asana/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/asana/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/asana/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-asana/index.rst b/docs/apache-airflow-providers-asana/index.rst index ce04f8b4ae871..edc88f080c3f5 100644 --- a/docs/apache-airflow-providers-asana/index.rst +++ b/docs/apache-airflow-providers-asana/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/asana/index> + System Tests <_api/tests/system/asana/index> .. 
toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-asana/operators/asana.rst b/docs/apache-airflow-providers-asana/operators/asana.rst index aff9f9f51e3db..5c9cdd0def8ad 100644 --- a/docs/apache-airflow-providers-asana/operators/asana.rst +++ b/docs/apache-airflow-providers-asana/operators/asana.rst @@ -86,7 +86,7 @@ the Asana connection to use to connect to your account (``conn_id``). There are `task attributes you can overwrite `_ through the ``task_parameters``. -.. exampleinclude:: /../../tests/system/providers/asana/example_asana.py +.. exampleinclude:: /../../providers/tests/system/asana/example_asana.py :language: python :dedent: 4 :start-after: [START asana_example_dag] diff --git a/docs/apache-airflow-providers-atlassian-jira/changelog.rst b/docs/apache-airflow-providers-atlassian-jira/changelog.rst index 6e1c12bdf4035..5ea7dfff3dea2 100644 --- a/docs/apache-airflow-providers-atlassian-jira/changelog.rst +++ b/docs/apache-airflow-providers-atlassian-jira/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/atlassian/jira/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/atlassian/jira/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-celery/changelog.rst b/docs/apache-airflow-providers-celery/changelog.rst index b9abe5dbf53e3..c1b38edc7d933 100644 --- a/docs/apache-airflow-providers-celery/changelog.rst +++ b/docs/apache-airflow-providers-celery/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/celery/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/celery/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-cloudant/changelog.rst b/docs/apache-airflow-providers-cloudant/changelog.rst index 854f5fda01e2f..d969e082c17b2 100644 --- a/docs/apache-airflow-providers-cloudant/changelog.rst +++ b/docs/apache-airflow-providers-cloudant/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/cloudant/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/cloudant/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-cncf-kubernetes/changelog.rst b/docs/apache-airflow-providers-cncf-kubernetes/changelog.rst index 493abbcdb8487..6ad86cec6753c 100644 --- a/docs/apache-airflow-providers-cncf-kubernetes/changelog.rst +++ b/docs/apache-airflow-providers-cncf-kubernetes/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/cncf/kubernetes/CHANGELOG.rst +.. 
include:: ../../providers/src/airflow/providers/cncf/kubernetes/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-cncf-kubernetes/index.rst b/docs/apache-airflow-providers-cncf-kubernetes/index.rst index 1db81c09a3f6d..e0da7d26e1b42 100644 --- a/docs/apache-airflow-providers-cncf-kubernetes/index.rst +++ b/docs/apache-airflow-providers-cncf-kubernetes/index.rst @@ -59,14 +59,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/cncf/kubernetes/index> + System Tests <_api/tests/system/cncf/kubernetes/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-cncf-kubernetes/kubernetes_executor.rst b/docs/apache-airflow-providers-cncf-kubernetes/kubernetes_executor.rst index 57d0a26742213..a85a793712885 100644 --- a/docs/apache-airflow-providers-cncf-kubernetes/kubernetes_executor.rst +++ b/docs/apache-airflow-providers-cncf-kubernetes/kubernetes_executor.rst @@ -108,21 +108,21 @@ With these requirements in mind, here are some examples of basic ``pod_template_ Storing DAGs in the image: -.. literalinclude:: /../../airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml +.. literalinclude:: /../../providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml :language: yaml :start-after: [START template_with_dags_in_image] :end-before: [END template_with_dags_in_image] Storing DAGs in a ``persistentVolume``: -.. literalinclude:: /../../airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml +.. literalinclude:: /../../providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml :language: yaml :start-after: [START template_with_dags_in_volume] :end-before: [END template_with_dags_in_volume] Pulling DAGs from ``git``: -.. literalinclude:: /../../airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml +.. literalinclude:: /../../providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml :language: yaml :start-after: [START git_sync_template] :end-before: [END git_sync_template] diff --git a/docs/apache-airflow-providers-cncf-kubernetes/operators.rst b/docs/apache-airflow-providers-cncf-kubernetes/operators.rst index 6d5b1414488af..2268a8655c987 100644 --- a/docs/apache-airflow-providers-cncf-kubernetes/operators.rst +++ b/docs/apache-airflow-providers-cncf-kubernetes/operators.rst @@ -102,7 +102,7 @@ Using this method will ensure correctness and type safety. While we have removed almost all Kubernetes convenience classes, we have kept the :class:`~airflow.providers.cncf.kubernetes.secret.Secret` class to simplify the process of generating secret volumes/env variables. -.. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes.py +.. exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes.py :language: python :start-after: [START howto_operator_k8s_cluster_resources] :end-before: [END howto_operator_k8s_cluster_resources] @@ -135,21 +135,21 @@ Create the Secret using ``kubectl``: Then use it in your pod like so: -.. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes.py +.. 
exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes.py :language: python :start-after: [START howto_operator_k8s_private_image] :end-before: [END howto_operator_k8s_private_image] Also for this action you can use operator in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes_async.py +.. exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes_async.py :language: python :start-after: [START howto_operator_k8s_private_image_async] :end-before: [END howto_operator_k8s_private_image_async] Example to fetch and display container log periodically -.. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes_async.py +.. exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes_async.py :language: python :start-after: [START howto_operator_async_log] :end-before: [END howto_operator_async_log] @@ -168,7 +168,7 @@ alongside the Pod. The Pod must write the XCom value into this location at the ` See the following example on how this occurs: -.. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes.py +.. exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes.py :language: python :start-after: [START howto_operator_k8s_write_xcom] :end-before: [END howto_operator_k8s_write_xcom] @@ -177,7 +177,7 @@ See the following example on how this occurs: Also for this action you can use operator in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes_async.py +.. exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes_async.py :language: python :start-after: [START howto_operator_k8s_write_xcom_async] :end-before: [END howto_operator_k8s_write_xcom_async] @@ -621,7 +621,7 @@ request that dynamically launches this Job. Users can specify a kubeconfig file using the ``config_file`` parameter, otherwise the operator will default to ``~/.kube/config``. It also allows users to supply a template YAML file using the ``job_template_file`` parameter. -.. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes_job.py +.. exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes_job.py :language: python :dedent: 4 :start-after: [START howto_operator_k8s_job] @@ -629,7 +629,7 @@ to ``~/.kube/config``. It also allows users to supply a template YAML file using The :class:`~airflow.providers.cncf.kubernetes.operators.job.KubernetesJobOperator` also supports deferrable mode: -.. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes_job.py +.. exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes_job.py :language: python :dedent: 4 :start-after: [START howto_operator_k8s_job_deferrable] @@ -656,7 +656,7 @@ KubernetesDeleteJobOperator The :class:`~airflow.providers.cncf.kubernetes.operators.job.KubernetesDeleteJobOperator` allows you to delete Jobs on a Kubernetes cluster. -.. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes_job.py +.. exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes_job.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_k8s_job] @@ -671,7 +671,7 @@ KubernetesPatchJobOperator The :class:`~airflow.providers.cncf.kubernetes.operators.job.KubernetesPatchJobOperator` allows you to update Jobs on a Kubernetes cluster. -.. 
exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes_job.py +.. exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes_job.py :language: python :dedent: 4 :start-after: [START howto_operator_update_job] diff --git a/docs/apache-airflow-providers-cohere/changelog.rst b/docs/apache-airflow-providers-cohere/changelog.rst index f4e96e1909fb8..aa7753b85cd65 100644 --- a/docs/apache-airflow-providers-cohere/changelog.rst +++ b/docs/apache-airflow-providers-cohere/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/cohere/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/cohere/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-cohere/index.rst b/docs/apache-airflow-providers-cohere/index.rst index 8909c2bd72dbf..3ae62208a98fe 100644 --- a/docs/apache-airflow-providers-cohere/index.rst +++ b/docs/apache-airflow-providers-cohere/index.rst @@ -51,7 +51,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/cohere/index> + System Tests <_api/tests/system/cohere/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-cohere/operators/embedding.rst b/docs/apache-airflow-providers-cohere/operators/embedding.rst index b765fe8e9d07c..18adf619ce6d7 100644 --- a/docs/apache-airflow-providers-cohere/operators/embedding.rst +++ b/docs/apache-airflow-providers-cohere/operators/embedding.rst @@ -33,7 +33,7 @@ connect to your account. Example Code: ------------- -.. exampleinclude:: /../../tests/system/providers/cohere/example_cohere_embedding_operator.py +.. exampleinclude:: /../../providers/tests/system/cohere/example_cohere_embedding_operator.py :language: python :dedent: 4 :start-after: [START howto_operator_cohere_embedding] diff --git a/docs/apache-airflow-providers-common-compat/changelog.rst b/docs/apache-airflow-providers-common-compat/changelog.rst index 32438992010a2..074a237efcc16 100644 --- a/docs/apache-airflow-providers-common-compat/changelog.rst +++ b/docs/apache-airflow-providers-common-compat/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/common/compat/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/common/compat/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-common-io/changelog.rst b/docs/apache-airflow-providers-common-io/changelog.rst index 16dec7d266c36..8662eabd816e1 100644 --- a/docs/apache-airflow-providers-common-io/changelog.rst +++ b/docs/apache-airflow-providers-common-io/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/common/io/CHANGELOG.rst +.. 
include:: ../../providers/src/airflow/providers/common/io/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-common-io/index.rst b/docs/apache-airflow-providers-common-io/index.rst index 606b6c5161bcd..ac0f1433d20cd 100644 --- a/docs/apache-airflow-providers-common-io/index.rst +++ b/docs/apache-airflow-providers-common-io/index.rst @@ -51,14 +51,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/common/io/index> + System Tests <_api/tests/system/common/io/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-common-io/operators.rst b/docs/apache-airflow-providers-common-io/operators.rst index 12b4a1c207ff0..fee5bb8eac5f1 100644 --- a/docs/apache-airflow-providers-common-io/operators.rst +++ b/docs/apache-airflow-providers-common-io/operators.rst @@ -40,7 +40,7 @@ Otherwise the data will be streamed from the source to the destination. The example below shows how to instantiate the FileTransferOperator task. -.. exampleinclude:: /../../tests/system/providers/common/io/example_file_transfer_local_to_s3.py +.. exampleinclude:: /../../providers/tests/system/common/io/example_file_transfer_local_to_s3.py :language: python :dedent: 4 :start-after: [START howto_transfer_local_to_s3] diff --git a/docs/apache-airflow-providers-common-sql/changelog.rst b/docs/apache-airflow-providers-common-sql/changelog.rst index b1f08fee1ab60..d071c9fc1a1e6 100644 --- a/docs/apache-airflow-providers-common-sql/changelog.rst +++ b/docs/apache-airflow-providers-common-sql/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/common/sql/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/common/sql/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-common-sql/index.rst b/docs/apache-airflow-providers-common-sql/index.rst index f764b57a8b680..2d554bf3da26e 100644 --- a/docs/apache-airflow-providers-common-sql/index.rst +++ b/docs/apache-airflow-providers-common-sql/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/common/sql/index> + System Tests <_api/tests/system/common/sql/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-common-sql/operators.rst b/docs/apache-airflow-providers-common-sql/operators.rst index bc725be418c03..e6e24eefb0017 100644 --- a/docs/apache-airflow-providers-common-sql/operators.rst +++ b/docs/apache-airflow-providers-common-sql/operators.rst @@ -38,7 +38,7 @@ different databases. Parameters of the operators are: The example below shows how to instantiate the SQLExecuteQueryOperator task. -.. exampleinclude:: /../../tests/system/providers/common/sql/example_sql_execute_query.py +.. exampleinclude:: /../../providers/tests/system/common/sql/example_sql_execute_query.py :language: python :dedent: 4 :start-after: [START howto_operator_sql_execute_query] @@ -101,7 +101,7 @@ empty tables to return valid integers. The below example demonstrates how to instantiate the SQLColumnCheckOperator task. -.. exampleinclude:: /../../tests/system/providers/common/sql/example_sql_column_table_check.py +.. 
exampleinclude:: /../../providers/tests/system/common/sql/example_sql_column_table_check.py :language: python :dedent: 4 :start-after: [START howto_operator_sql_column_check] @@ -141,7 +141,7 @@ checks. The below example demonstrates how to instantiate the SQLTableCheckOperator task. -.. exampleinclude:: /../../tests/system/providers/common/sql/example_sql_column_table_check.py +.. exampleinclude:: /../../providers/tests/system/common/sql/example_sql_column_table_check.py :language: python :dedent: 4 :start-after: [START howto_operator_sql_table_check] diff --git a/docs/apache-airflow-providers-databricks/changelog.rst b/docs/apache-airflow-providers-databricks/changelog.rst index 87771d0f51998..8e4f5f126efd4 100644 --- a/docs/apache-airflow-providers-databricks/changelog.rst +++ b/docs/apache-airflow-providers-databricks/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/databricks/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/databricks/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-databricks/index.rst b/docs/apache-airflow-providers-databricks/index.rst index 5a124818ea4dc..32cd75c0ef2be 100644 --- a/docs/apache-airflow-providers-databricks/index.rst +++ b/docs/apache-airflow-providers-databricks/index.rst @@ -50,14 +50,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/databricks/index> + System Tests <_api/tests/system/databricks/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-databricks/operators/copy_into.rst b/docs/apache-airflow-providers-databricks/operators/copy_into.rst index 71a3fa9e89ad8..56eb20c662286 100644 --- a/docs/apache-airflow-providers-databricks/operators/copy_into.rst +++ b/docs/apache-airflow-providers-databricks/operators/copy_into.rst @@ -46,7 +46,7 @@ Importing CSV data An example usage of the DatabricksCopyIntoOperator to import CSV data into a table is as follows: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_sql.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sql.py :language: python :start-after: [START howto_operator_databricks_copy_into] :end-before: [END howto_operator_databricks_copy_into] diff --git a/docs/apache-airflow-providers-databricks/operators/jobs_create.rst b/docs/apache-airflow-providers-databricks/operators/jobs_create.rst index 7e6765eba420a..621423f83f32d 100644 --- a/docs/apache-airflow-providers-databricks/operators/jobs_create.rst +++ b/docs/apache-airflow-providers-databricks/operators/jobs_create.rst @@ -66,7 +66,7 @@ Specifying parameters as JSON An example usage of the DatabricksCreateJobsOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py :language: python :start-after: [START howto_operator_databricks_jobs_create_json] :end-before: [END howto_operator_databricks_jobs_create_json] @@ -76,7 +76,7 @@ Using named parameters You can also use named parameters to initialize the operator and run the job. -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks.py +.. 
exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py :language: python :start-after: [START howto_operator_databricks_jobs_create_named] :end-before: [END howto_operator_databricks_jobs_create_named] @@ -87,7 +87,7 @@ Pairing with DatabricksRunNowOperator You can use the ``job_id`` that is returned by the DatabricksCreateJobsOperator in the return_value XCom as an argument to the DatabricksRunNowOperator to run the job. -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py :language: python :start-after: [START howto_operator_databricks_run_now] :end-before: [END howto_operator_databricks_run_now] diff --git a/docs/apache-airflow-providers-databricks/operators/notebook.rst b/docs/apache-airflow-providers-databricks/operators/notebook.rst index b87d0d20e6f5a..bf7b04ca74caa 100644 --- a/docs/apache-airflow-providers-databricks/operators/notebook.rst +++ b/docs/apache-airflow-providers-databricks/operators/notebook.rst @@ -31,14 +31,14 @@ Examples Running a notebook in Databricks on a new cluster ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py :language: python :start-after: [START howto_operator_databricks_notebook_new_cluster] :end-before: [END howto_operator_databricks_notebook_new_cluster] Running a notebook in Databricks on an existing cluster ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py :language: python :start-after: [START howto_operator_databricks_notebook_existing_cluster] :end-before: [END howto_operator_databricks_notebook_existing_cluster] diff --git a/docs/apache-airflow-providers-databricks/operators/repos_create.rst b/docs/apache-airflow-providers-databricks/operators/repos_create.rst index 6611a51cd6c15..6b60eae27870b 100644 --- a/docs/apache-airflow-providers-databricks/operators/repos_create.rst +++ b/docs/apache-airflow-providers-databricks/operators/repos_create.rst @@ -63,7 +63,7 @@ Create a Databricks Repo An example usage of the DatabricksReposCreateOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_repos.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_repos.py :language: python :start-after: [START howto_operator_databricks_repo_create] :end-before: [END howto_operator_databricks_repo_create] diff --git a/docs/apache-airflow-providers-databricks/operators/repos_delete.rst b/docs/apache-airflow-providers-databricks/operators/repos_delete.rst index 74d4b62972a14..3186dd131dff7 100644 --- a/docs/apache-airflow-providers-databricks/operators/repos_delete.rst +++ b/docs/apache-airflow-providers-databricks/operators/repos_delete.rst @@ -55,7 +55,7 @@ Deleting Databricks Repo by specifying path An example usage of the DatabricksReposDeleteOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_repos.py +.. 
exampleinclude:: /../../providers/tests/system/databricks/example_databricks_repos.py :language: python :start-after: [START howto_operator_databricks_repo_delete] :end-before: [END howto_operator_databricks_repo_delete] diff --git a/docs/apache-airflow-providers-databricks/operators/repos_update.rst b/docs/apache-airflow-providers-databricks/operators/repos_update.rst index 56af4edabbcf6..6893ee0107e86 100644 --- a/docs/apache-airflow-providers-databricks/operators/repos_update.rst +++ b/docs/apache-airflow-providers-databricks/operators/repos_update.rst @@ -60,7 +60,7 @@ Updating Databricks Repo by specifying path An example usage of the DatabricksReposUpdateOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_repos.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_repos.py :language: python :start-after: [START howto_operator_databricks_repo_update] :end-before: [END howto_operator_databricks_repo_update] diff --git a/docs/apache-airflow-providers-databricks/operators/sql.rst b/docs/apache-airflow-providers-databricks/operators/sql.rst index 55bbf64758562..33acacae1c6bd 100644 --- a/docs/apache-airflow-providers-databricks/operators/sql.rst +++ b/docs/apache-airflow-providers-databricks/operators/sql.rst @@ -49,7 +49,7 @@ Selecting data An example usage of the DatabricksSqlOperator to select data from a table is as follows: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_sql.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sql.py :language: python :start-after: [START howto_operator_databricks_sql_select] :end-before: [END howto_operator_databricks_sql_select] @@ -59,7 +59,7 @@ Selecting data into a file An example usage of the DatabricksSqlOperator to select data from a table and store in a file is as follows: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_sql.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sql.py :language: python :start-after: [START howto_operator_databricks_sql_select_file] :end-before: [END howto_operator_databricks_sql_select_file] @@ -69,7 +69,7 @@ Executing multiple statements An example usage of the DatabricksSqlOperator to perform multiple SQL statements is as follows: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_sql.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sql.py :language: python :start-after: [START howto_operator_databricks_sql_multiple] :end-before: [END howto_operator_databricks_sql_multiple] @@ -80,7 +80,7 @@ Executing multiple statements from a file An example usage of the DatabricksSqlOperator to perform statements from a file is as follows: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_sql.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sql.py :language: python :start-after: [START howto_operator_databricks_sql_multiple_file] :end-before: [END howto_operator_databricks_sql_multiple_file] @@ -107,7 +107,7 @@ Examples -------- Configuring Databricks connection to be used with the Sensor. -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_sensors.py +.. 
exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_databricks_connection_setup] @@ -115,7 +115,7 @@ Configuring Databricks connection to be used with the Sensor. Poking the specific table with the SQL statement: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_sensors.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_databricks_sql] @@ -154,7 +154,7 @@ Examples -------- Configuring Databricks connection to be used with the Sensor. -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_sensors.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_databricks_connection_setup] @@ -162,7 +162,7 @@ Configuring Databricks connection to be used with the Sensor. Poking the specific table for existence of data/partition: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_sensors.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_databricks_partition] diff --git a/docs/apache-airflow-providers-databricks/operators/submit_run.rst b/docs/apache-airflow-providers-databricks/operators/submit_run.rst index 706920458c64c..10548583cfa3f 100644 --- a/docs/apache-airflow-providers-databricks/operators/submit_run.rst +++ b/docs/apache-airflow-providers-databricks/operators/submit_run.rst @@ -113,7 +113,7 @@ Specifying parameters as JSON An example usage of the DatabricksSubmitRunOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py :language: python :start-after: [START howto_operator_databricks_json] :end-before: [END howto_operator_databricks_json] @@ -123,7 +123,7 @@ Using named parameters You can also use named parameters to initialize the operator and run the job. -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py :language: python :start-after: [START howto_operator_databricks_named] :end-before: [END howto_operator_databricks_named] diff --git a/docs/apache-airflow-providers-databricks/operators/task.rst b/docs/apache-airflow-providers-databricks/operators/task.rst index 476e72c494b9b..331481d915c4b 100644 --- a/docs/apache-airflow-providers-databricks/operators/task.rst +++ b/docs/apache-airflow-providers-databricks/operators/task.rst @@ -33,14 +33,14 @@ Examples Running a notebook in Databricks using DatabricksTaskOperator ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py :language: python :start-after: [START howto_operator_databricks_task_notebook] :end-before: [END howto_operator_databricks_task_notebook] Running a SQL query in Databricks using DatabricksTaskOperator ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks.py +.. 
exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py :language: python :start-after: [START howto_operator_databricks_task_sql] :end-before: [END howto_operator_databricks_task_sql] diff --git a/docs/apache-airflow-providers-databricks/operators/workflow.rst b/docs/apache-airflow-providers-databricks/operators/workflow.rst index 6da38add6669e..b5c81050143a8 100644 --- a/docs/apache-airflow-providers-databricks/operators/workflow.rst +++ b/docs/apache-airflow-providers-databricks/operators/workflow.rst @@ -45,7 +45,7 @@ Examples Example of what a DAG looks like with a DatabricksWorkflowTaskGroup ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. exampleinclude:: /../../tests/system/providers/databricks/example_databricks_workflow.py +.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_workflow.py :language: python :start-after: [START howto_databricks_workflow_notebook] :end-before: [END howto_databricks_workflow_notebook] diff --git a/docs/apache-airflow-providers-datadog/changelog.rst b/docs/apache-airflow-providers-datadog/changelog.rst index e392f2d04075b..43f0ce3e63a01 100644 --- a/docs/apache-airflow-providers-datadog/changelog.rst +++ b/docs/apache-airflow-providers-datadog/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/datadog/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/datadog/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-dbt-cloud/changelog.rst b/docs/apache-airflow-providers-dbt-cloud/changelog.rst index f4b14c26b0034..be4203ad0c942 100644 --- a/docs/apache-airflow-providers-dbt-cloud/changelog.rst +++ b/docs/apache-airflow-providers-dbt-cloud/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/dbt/cloud/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/dbt/cloud/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-dbt-cloud/index.rst b/docs/apache-airflow-providers-dbt-cloud/index.rst index f9f214af8323f..82d72999eaee5 100644 --- a/docs/apache-airflow-providers-dbt-cloud/index.rst +++ b/docs/apache-airflow-providers-dbt-cloud/index.rst @@ -53,14 +53,14 @@ an Integrated Developer Environment (IDE). :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/dbt/cloud/index> + System Tests <_api/tests/system/dbt/cloud/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-dbt-cloud/operators.rst b/docs/apache-airflow-providers-dbt-cloud/operators.rst index af5b900d2340c..eaa285f6d4082 100644 --- a/docs/apache-airflow-providers-dbt-cloud/operators.rst +++ b/docs/apache-airflow-providers-dbt-cloud/operators.rst @@ -67,7 +67,7 @@ The below examples demonstrate how to instantiate DbtCloudRunJobOperator tasks w asynchronous waiting for run termination, respectively. To note, the ``account_id`` for the operators is referenced within the ``default_args`` of the example DAG. -.. exampleinclude:: /../../tests/system/providers/dbt/cloud/example_dbt_cloud.py +.. 
exampleinclude:: /../../providers/tests/system/dbt/cloud/example_dbt_cloud.py :language: python :dedent: 4 :start-after: [START howto_operator_dbt_cloud_run_job] @@ -76,7 +76,7 @@ referenced within the ``default_args`` of the example DAG. This next example also shows how to pass in custom runtime configuration (in this case for ``threads_override``) via the ``additional_run_config`` dictionary. -.. exampleinclude:: /../../tests/system/providers/dbt/cloud/example_dbt_cloud.py +.. exampleinclude:: /../../providers/tests/system/dbt/cloud/example_dbt_cloud.py :language: python :dedent: 4 :start-after: [START howto_operator_dbt_cloud_run_job_async] @@ -95,7 +95,7 @@ In the example below, the ``run_id`` value in the example below comes from the o DbtCloudRunJobOperator task by utilizing the ``.output`` property exposed for all operators. Also, to note, the ``account_id`` for the task is referenced within the ``default_args`` of the example DAG. -.. exampleinclude:: /../../tests/system/providers/dbt/cloud/example_dbt_cloud.py +.. exampleinclude:: /../../providers/tests/system/dbt/cloud/example_dbt_cloud.py :language: python :dedent: 4 :start-after: [START howto_operator_dbt_cloud_run_job_sensor] @@ -104,7 +104,7 @@ the ``account_id`` for the task is referenced within the ``default_args`` of the Also, you can poll for status of the job run asynchronously using ``deferrable`` mode. In this mode, worker slots are freed up while the sensor is running. -.. exampleinclude:: /../../tests/system/providers/dbt/cloud/example_dbt_cloud.py +.. exampleinclude:: /../../providers/tests/system/dbt/cloud/example_dbt_cloud.py :language: python :dedent: 4 :start-after: [START howto_operator_dbt_cloud_run_job_sensor_deferred] @@ -125,7 +125,7 @@ downloaded. For more information on dbt Cloud artifacts, reference `this documentation `__. -.. exampleinclude:: /../../tests/system/providers/dbt/cloud/example_dbt_cloud.py +.. exampleinclude:: /../../providers/tests/system/dbt/cloud/example_dbt_cloud.py :language: python :dedent: 4 :start-after: [START howto_operator_dbt_cloud_get_artifact] @@ -146,7 +146,7 @@ If a ``project_id`` is supplied, only jobs pertaining to this project id will be For more information on dbt Cloud list jobs, reference `this documentation `__. -.. exampleinclude:: /../../tests/system/providers/dbt/cloud/example_dbt_cloud.py +.. exampleinclude:: /../../providers/tests/system/dbt/cloud/example_dbt_cloud.py :language: python :dedent: 4 :start-after: [START howto_operator_dbt_cloud_list_jobs] diff --git a/docs/apache-airflow-providers-dingding/changelog.rst b/docs/apache-airflow-providers-dingding/changelog.rst index 43f13ec33466e..7bf7561b35dbf 100644 --- a/docs/apache-airflow-providers-dingding/changelog.rst +++ b/docs/apache-airflow-providers-dingding/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/dingding/CHANGELOG.rst +.. 
include:: ../../providers/src/airflow/providers/dingding/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-dingding/index.rst b/docs/apache-airflow-providers-dingding/index.rst index be557b9bc5a1b..fd799c323c749 100644 --- a/docs/apache-airflow-providers-dingding/index.rst +++ b/docs/apache-airflow-providers-dingding/index.rst @@ -48,14 +48,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/dingding/index> + System Tests <_api/tests/system/dingding/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-dingding/operators.rst b/docs/apache-airflow-providers-dingding/operators.rst index dc3ca1b5ffe9e..600c321b9f1f7 100644 --- a/docs/apache-airflow-providers-dingding/operators.rst +++ b/docs/apache-airflow-providers-dingding/operators.rst @@ -38,7 +38,7 @@ Basic Usage Use the :class:`~airflow.providers.dingding.operators.dingding.DingdingOperator` to send message through `DingTalk Custom Robot `__: -.. exampleinclude:: /../../tests/system/providers/dingding/example_dingding.py +.. exampleinclude:: /../../providers/tests/system/dingding/example_dingding.py :language: python :dedent: 4 :start-after: [START howto_operator_dingding] @@ -51,7 +51,7 @@ Remind users in message Use parameters ``at_mobiles`` and ``at_all`` to remind specific users when you send message, ``at_mobiles`` will be ignored when ``at_all`` is set to ``True``: -.. exampleinclude:: /../../tests/system/providers/dingding/example_dingding.py +.. exampleinclude:: /../../providers/tests/system/dingding/example_dingding.py :language: python :dedent: 4 :start-after: [START howto_operator_dingding_remind_users] @@ -66,7 +66,7 @@ can send rich text messages including link, markdown, actionCard and feedCard through `DingTalk Custom Robot `__. A rich text message cannot remind specific users except by using markdown type message: -.. exampleinclude:: /../../tests/system/providers/dingding/example_dingding.py +.. exampleinclude:: /../../providers/tests/system/dingding/example_dingding.py :language: python :dedent: 4 :start-after: [START howto_operator_dingding_rich_text] @@ -80,7 +80,7 @@ Sending messages from a Task callback and then pass the function to ``sla_miss_callback``, ``on_success_callback``, ``on_failure_callback``, or ``on_retry_callback``. Here we use ``on_failure_callback`` as an example: -.. exampleinclude:: /../../tests/system/providers/dingding/example_dingding.py +.. exampleinclude:: /../../providers/tests/system/dingding/example_dingding.py :language: python :start-after: [START howto_operator_dingding_failure_callback] :end-before: [END howto_operator_dingding_failure_callback] diff --git a/docs/apache-airflow-providers-discord/changelog.rst b/docs/apache-airflow-providers-discord/changelog.rst index c5056231dabec..aec69ca9c5b1f 100644 --- a/docs/apache-airflow-providers-discord/changelog.rst +++ b/docs/apache-airflow-providers-discord/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/discord/CHANGELOG.rst +..
include:: ../../providers/src/airflow/providers/discord/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-docker/changelog.rst b/docs/apache-airflow-providers-docker/changelog.rst index 46a39571be6e1..910d96f6cbe01 100644 --- a/docs/apache-airflow-providers-docker/changelog.rst +++ b/docs/apache-airflow-providers-docker/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/docker/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/docker/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-docker/decorators/docker.rst b/docs/apache-airflow-providers-docker/decorators/docker.rst index cfcd51860920b..df5b237bfa710 100644 --- a/docs/apache-airflow-providers-docker/decorators/docker.rst +++ b/docs/apache-airflow-providers-docker/decorators/docker.rst @@ -162,7 +162,7 @@ ulimits Usage Example ------------- -.. exampleinclude:: /../../tests/system/providers/docker/example_taskflow_api_docker_virtualenv.py +.. exampleinclude:: /../../providers/tests/system/docker/example_taskflow_api_docker_virtualenv.py :language: python :start-after: [START transform_docker] :end-before: [END transform_docker] diff --git a/docs/apache-airflow-providers-docker/index.rst b/docs/apache-airflow-providers-docker/index.rst index 24593b8755889..4f5f263f263d1 100644 --- a/docs/apache-airflow-providers-docker/index.rst +++ b/docs/apache-airflow-providers-docker/index.rst @@ -42,14 +42,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/docker/index> + System Tests <_api/tests/system/docker/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-edge/changelog.rst b/docs/apache-airflow-providers-edge/changelog.rst index 4a87ccc753b07..46bd53ed4ccc6 100644 --- a/docs/apache-airflow-providers-edge/changelog.rst +++ b/docs/apache-airflow-providers-edge/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/edge/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/edge/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-elasticsearch/changelog.rst b/docs/apache-airflow-providers-elasticsearch/changelog.rst index 496bc18ce3663..840359e3b0a4e 100644 --- a/docs/apache-airflow-providers-elasticsearch/changelog.rst +++ b/docs/apache-airflow-providers-elasticsearch/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/elasticsearch/CHANGELOG.rst +.. 
include:: ../../providers/src/airflow/providers/elasticsearch/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-elasticsearch/connections/elasticsearch.rst b/docs/apache-airflow-providers-elasticsearch/connections/elasticsearch.rst index c068abe1a3c77..8097b8bd61c4f 100644 --- a/docs/apache-airflow-providers-elasticsearch/connections/elasticsearch.rst +++ b/docs/apache-airflow-providers-elasticsearch/connections/elasticsearch.rst @@ -72,7 +72,7 @@ For example: export AIRFLOW_CONN_ELASTICSEARCH_DEFAULT='elasticsearch://elasticsearchlogin:elasticsearchpassword@elastic.co:80/http' -.. exampleinclude:: /../../tests/system/providers/elasticsearch/example_elasticsearch_query.py +.. exampleinclude:: /../../providers/tests/system/elasticsearch/example_elasticsearch_query.py :language: python :dedent: 4 :start-after: [START howto_elasticsearch_query] diff --git a/docs/apache-airflow-providers-elasticsearch/hooks/elasticsearch_python_hook.rst b/docs/apache-airflow-providers-elasticsearch/hooks/elasticsearch_python_hook.rst index ff1f925182ce0..537b4973b41cf 100644 --- a/docs/apache-airflow-providers-elasticsearch/hooks/elasticsearch_python_hook.rst +++ b/docs/apache-airflow-providers-elasticsearch/hooks/elasticsearch_python_hook.rst @@ -36,7 +36,7 @@ es_conn_args Usage Example --------------------- -.. exampleinclude:: /../../tests/system/providers/elasticsearch/example_elasticsearch_query.py +.. exampleinclude:: /../../providers/tests/system/elasticsearch/example_elasticsearch_query.py :language: python :start-after: [START howto_elasticsearch_python_hook] :end-before: [END howto_elasticsearch_python_hook] diff --git a/docs/apache-airflow-providers-elasticsearch/hooks/elasticsearch_sql_hook.rst b/docs/apache-airflow-providers-elasticsearch/hooks/elasticsearch_sql_hook.rst index 6021f4ac5dfeb..084d445cb0bad 100644 --- a/docs/apache-airflow-providers-elasticsearch/hooks/elasticsearch_sql_hook.rst +++ b/docs/apache-airflow-providers-elasticsearch/hooks/elasticsearch_sql_hook.rst @@ -26,7 +26,7 @@ Elasticsearch Hook that interact with Elasticsearch through the elasticsearch-db Usage Example --------------------- -.. exampleinclude:: /../../tests/system/providers/elasticsearch/example_elasticsearch_query.py +.. exampleinclude:: /../../providers/tests/system/elasticsearch/example_elasticsearch_query.py :language: python :start-after: [START howto_elasticsearch_query] :end-before: [END howto_elasticsearch_query] diff --git a/docs/apache-airflow-providers-elasticsearch/index.rst b/docs/apache-airflow-providers-elasticsearch/index.rst index 727d852d7cf26..6aeca1e89e97d 100644 --- a/docs/apache-airflow-providers-elasticsearch/index.rst +++ b/docs/apache-airflow-providers-elasticsearch/index.rst @@ -51,7 +51,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/elasticsearch/index> + System Tests <_api/tests/system/elasticsearch/index> .. toctree:: :hidden: diff --git a/docs/apache-airflow-providers-exasol/changelog.rst b/docs/apache-airflow-providers-exasol/changelog.rst index 998d537e49b47..b1f41f01d8ee0 100644 --- a/docs/apache-airflow-providers-exasol/changelog.rst +++ b/docs/apache-airflow-providers-exasol/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/exasol/CHANGELOG.rst +.. 
include:: ../../providers/src/airflow/providers/exasol/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-fab/auth-manager/access-control.rst b/docs/apache-airflow-providers-fab/auth-manager/access-control.rst index 5292c3f1f6296..914635942b7c1 100644 --- a/docs/apache-airflow-providers-fab/auth-manager/access-control.rst +++ b/docs/apache-airflow-providers-fab/auth-manager/access-control.rst @@ -46,7 +46,7 @@ Viewer ^^^^^^ ``Viewer`` users have limited read permissions: -.. exampleinclude:: /../../airflow/providers/fab/auth_manager/security_manager/override.py +.. exampleinclude:: /../../providers/src/airflow/providers/fab/auth_manager/security_manager/override.py :language: python :start-after: [START security_viewer_perms] :end-before: [END security_viewer_perms] @@ -55,7 +55,7 @@ User ^^^^ ``User`` users have ``Viewer`` permissions plus additional permissions: -.. exampleinclude:: /../../airflow/providers/fab/auth_manager/security_manager/override.py +.. exampleinclude:: /../../providers/src/airflow/providers/fab/auth_manager/security_manager/override.py :language: python :start-after: [START security_user_perms] :end-before: [END security_user_perms] @@ -64,7 +64,7 @@ Op ^^ ``Op`` users have ``User`` permissions plus additional permissions: -.. exampleinclude:: /../../airflow/providers/fab/auth_manager/security_manager/override.py +.. exampleinclude:: /../../providers/src/airflow/providers/fab/auth_manager/security_manager/override.py :language: python :start-after: [START security_op_perms] :end-before: [END security_op_perms] @@ -74,7 +74,7 @@ Admin ``Admin`` users have all possible permissions, including granting or revoking permissions from other users. ``Admin`` users have ``Op`` permission plus additional permissions: -.. exampleinclude:: /../../airflow/providers/fab/auth_manager/security_manager/override.py +.. exampleinclude:: /../../providers/src/airflow/providers/fab/auth_manager/security_manager/override.py :language: python :start-after: [START security_admin_perms] :end-before: [END security_admin_perms] diff --git a/docs/apache-airflow-providers-fab/changelog.rst b/docs/apache-airflow-providers-fab/changelog.rst index c6bdcaa11e75a..41fc2e14c26a5 100644 --- a/docs/apache-airflow-providers-fab/changelog.rst +++ b/docs/apache-airflow-providers-fab/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/fab/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/fab/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-facebook/changelog.rst b/docs/apache-airflow-providers-facebook/changelog.rst index e6717758dc284..e84fbb0ce3094 100644 --- a/docs/apache-airflow-providers-facebook/changelog.rst +++ b/docs/apache-airflow-providers-facebook/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/facebook/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/facebook/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-ftp/changelog.rst b/docs/apache-airflow-providers-ftp/changelog.rst index c9c69b3a7d2aa..36f8c3d14951e 100644 --- a/docs/apache-airflow-providers-ftp/changelog.rst +++ b/docs/apache-airflow-providers-ftp/changelog.rst @@ -22,4 +22,4 @@ .. 
IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/ftp/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/ftp/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-ftp/index.rst b/docs/apache-airflow-providers-ftp/index.rst index 269f712d8d257..25dfa5ef7d756 100644 --- a/docs/apache-airflow-providers-ftp/index.rst +++ b/docs/apache-airflow-providers-ftp/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/ftp/index> + System Tests <_api/tests/system/ftp/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources @@ -65,7 +65,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/ftp/index> + System Tests <_api/tests/system/ftp/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-ftp/operators/index.rst b/docs/apache-airflow-providers-ftp/operators/index.rst index 48933b9ed8a21..3e2f6592e1c35 100644 --- a/docs/apache-airflow-providers-ftp/operators/index.rst +++ b/docs/apache-airflow-providers-ftp/operators/index.rst @@ -33,7 +33,7 @@ For parameter definition take a look at :class:`~airflow.providers.ftp.operators The below example shows how to use the FTPFileTransmitOperator to transfer a locally stored file to a remote FTP Server: -.. exampleinclude:: /../../tests/system/providers/ftp/example_ftp.py +.. exampleinclude:: /../../providers/tests/system/ftp/example_ftp.py :language: python :dedent: 4 :start-after: [START howto_operator_ftp_put] @@ -41,7 +41,7 @@ The below example shows how to use the FTPFileTransmitOperator to transfer a loc The below example shows how to use the FTPFileTransmitOperator to pull a file from a remote FTP Server. -.. exampleinclude:: /../../tests/system/providers/ftp/example_ftp.py +.. exampleinclude:: /../../providers/tests/system/ftp/example_ftp.py :language: python :dedent: 4 :start-after: [START howto_operator_ftp_get] @@ -63,7 +63,7 @@ For parameter definition take a look at :class:`~airflow.providers.ftp.operators The below example shows how to use the FTPSFileTransmitOperator to transfer a locally stored file to a remote FTPS Server: -.. exampleinclude:: /../../tests/system/providers/ftp/example_ftp.py +.. exampleinclude:: /../../providers/tests/system/ftp/example_ftp.py :language: python :dedent: 4 :start-after: [START howto_operator_ftps_put] @@ -71,7 +71,7 @@ The below example shows how to use the FTPSFileTransmitOperator to transfer a lo The below example shows how to use the FTPSFileTransmitOperator to pull a file from a remote FTPS Server. -.. exampleinclude:: /../../tests/system/providers/ftp/example_ftp.py +.. exampleinclude:: /../../providers/tests/system/ftp/example_ftp.py :language: python :dedent: 4 :start-after: [START howto_operator_ftps_get] diff --git a/docs/apache-airflow-providers-github/changelog.rst b/docs/apache-airflow-providers-github/changelog.rst index 93bddd8d65fb4..231425db490d2 100644 --- a/docs/apache-airflow-providers-github/changelog.rst +++ b/docs/apache-airflow-providers-github/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. 
include:: ../../airflow/providers/github/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/github/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-github/index.rst b/docs/apache-airflow-providers-github/index.rst index eb8fc3249f986..847dba31d1068 100644 --- a/docs/apache-airflow-providers-github/index.rst +++ b/docs/apache-airflow-providers-github/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/github/index> + System Tests <_api/tests/system/github/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-github/operators/index.rst b/docs/apache-airflow-providers-github/operators/index.rst index 1b2bf63894af1..448fb8dc3dc20 100644 --- a/docs/apache-airflow-providers-github/operators/index.rst +++ b/docs/apache-airflow-providers-github/operators/index.rst @@ -33,7 +33,7 @@ You can further process the result using An example of Listing all Repositories owned by a user, **client.get_user().get_repos()** can be implemented as follows: -.. exampleinclude:: /../../tests/system/providers/github/example_github.py +.. exampleinclude:: /../../providers/tests/system/github/example_github.py :language: python :dedent: 4 :start-after: [START howto_operator_list_repos_github] @@ -43,7 +43,7 @@ An example of Listing all Repositories owned by a user, **client.get_user().get_ An example of Listing Tags in a Repository, **client.get_repo(full_name_or_id='apache/airflow').get_tags()** can be implemented as follows: -.. exampleinclude:: /../../tests/system/providers/github/example_github.py +.. exampleinclude:: /../../providers/tests/system/github/example_github.py :language: python :dedent: 4 :start-after: [START howto_operator_list_tags_github] @@ -64,7 +64,7 @@ a Tag in `GitHub `__. An example for tag **v1.0**: -.. exampleinclude:: /../../tests/system/providers/github/example_github.py +.. exampleinclude:: /../../providers/tests/system/github/example_github.py :language: python :dedent: 4 :start-after: [START howto_tag_sensor_github] @@ -73,7 +73,7 @@ An example for tag **v1.0**: Similar functionality can be achieved by directly using :class:`~airflow.providers.github.sensors.github.GithubSensor`. -.. exampleinclude:: /../../tests/system/providers/github/example_github.py +.. exampleinclude:: /../../providers/tests/system/github/example_github.py :language: python :dedent: 4 :start-after: [START howto_sensor_github] diff --git a/docs/apache-airflow-providers-google/changelog.rst b/docs/apache-airflow-providers-google/changelog.rst index d18b3ef2017a9..f078efd1e3af5 100644 --- a/docs/apache-airflow-providers-google/changelog.rst +++ b/docs/apache-airflow-providers-google/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/google/CHANGELOG.rst +..
include:: ../../providers/src/airflow/providers/google/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-google/example-dags.rst b/docs/apache-airflow-providers-google/example-dags.rst index 49f4fbf327f12..15245b948c6c1 100644 --- a/docs/apache-airflow-providers-google/example-dags.rst +++ b/docs/apache-airflow-providers-google/example-dags.rst @@ -19,10 +19,10 @@ Example DAGs ============ You can learn how to use Google integrations by analyzing the source code of the example DAGs: -* `Google Ads `__ +* `Google Ads `__ * `Google Cloud (legacy) `__ -* `Google Cloud `__ -* `Google Firebase `__ +* `Google Cloud `__ +* `Google Firebase `__ * `Google Marketing Platform `__ -* `Google Workplace `__ (formerly Google Suite) -* `Google LevelDB `__ +* `Google Workplace `__ (formerly Google Suite) +* `Google LevelDB `__ diff --git a/docs/apache-airflow-providers-google/index.rst b/docs/apache-airflow-providers-google/index.rst index 05e05725aa17e..172b8d0931e7c 100644 --- a/docs/apache-airflow-providers-google/index.rst +++ b/docs/apache-airflow-providers-google/index.rst @@ -52,7 +52,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/google/index> + System Tests <_api/tests/system/google/index> .. toctree:: :hidden: diff --git a/docs/apache-airflow-providers-google/operators/ads.rst b/docs/apache-airflow-providers-google/operators/ads.rst index 8e228bc0fde93..af5fe5913a9d2 100644 --- a/docs/apache-airflow-providers-google/operators/ads.rst +++ b/docs/apache-airflow-providers-google/operators/ads.rst @@ -32,7 +32,7 @@ Google Ads to GCS To query the Google Ads API and generate a CSV report of the results use ``GoogleAdsToGcsOperator``. -.. exampleinclude:: /../../tests/system/providers/google/ads/example_ads.py +.. exampleinclude:: /../../providers/tests/system/google/ads/example_ads.py :language: python :dedent: 4 :start-after: [START howto_google_ads_to_gcs_operator] @@ -58,7 +58,7 @@ Upload Google Ads Accounts to GCS To upload Google Ads accounts to Google Cloud Storage bucket use the ``GoogleAdsListAccountsOperator``. -.. exampleinclude:: /../../tests/system/providers/google/ads/example_ads.py +.. exampleinclude:: /../../providers/tests/system/google/ads/example_ads.py :language: python :dedent: 4 :start-after: [START howto_ads_list_accounts_operator] diff --git a/docs/apache-airflow-providers-google/operators/cloud/automl.rst b/docs/apache-airflow-providers-google/operators/cloud/automl.rst index 8a92f49ac34f1..ebfe3ca501f44 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/automl.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/automl.rst @@ -46,7 +46,7 @@ All the functionality of legacy AutoML Natural Language, Vision, Video Intellige available on the Vertex AI platform. Please use :class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.CreateDatasetOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/automl/example_automl_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/automl/example_automl_dataset.py :language: python :dedent: 4 :start-after: [START howto_operator_automl_create_dataset] @@ -55,7 +55,7 @@ available on the Vertex AI platform. Please use After creating a dataset you can use it to import some data using :class:`~airflow.providers.google.cloud.operators.automl.AutoMLImportDataOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/automl/example_automl_dataset.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/automl/example_automl_dataset.py :language: python :dedent: 4 :start-after: [START howto_operator_automl_import_data] @@ -69,7 +69,7 @@ All the functionality of legacy AutoML Natural Language, Vision, Video Intellige available on the Vertex AI platform. Please use :class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.UpdateDatasetOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_update_dataset_operator] @@ -111,7 +111,7 @@ available on the Vertex AI platform. Please use You can find example on how to use VertexAI operators for AutoML Vision classification here: -.. exampleinclude:: /../../tests/system/providers/google/cloud/automl/example_automl_vision_classification.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/automl/example_automl_vision_classification.py :language: python :dedent: 4 :start-after: [START howto_cloud_create_image_classification_training_job_operator] @@ -119,7 +119,7 @@ You can find example on how to use VertexAI operators for AutoML Vision classifi Example on how to use VertexAI operators for AutoML Video Intelligence classification you can find here: -.. exampleinclude:: /../../tests/system/providers/google/cloud/automl/example_automl_video_classification.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/automl/example_automl_video_classification.py :language: python :dedent: 4 :start-after: [START howto_cloud_create_video_classification_training_job_operator] @@ -131,7 +131,7 @@ datasets. To create and import data to the dataset please use and :class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.ImportDataOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/automl/example_automl_translation.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/automl/example_automl_translation.py :language: python :dedent: 4 :start-after: [START howto_operator_automl_create_model] @@ -145,7 +145,7 @@ and will be removed after 31.03.2024. Please use :class:`airflow.providers.google.cloud.operators.vertex_ai.model_service.GetModelOperator` instead. You can find example on how to use VertexAI operators here: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_get_model_operator] @@ -159,7 +159,7 @@ and will be removed after 31.03.2024. Please use :class:`airflow.providers.google.cloud.operators.vertex_ai.endpoint_service.DeployModelOperator` instead. You can find example on how to use VertexAI operators here: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_deploy_model_operator] @@ -173,7 +173,7 @@ and will be removed after 31.03.2024. Please use :class:`airflow.providers.google.cloud.operators.vertex_ai.model_service.DeleteModelOperator` instead. You can find example on how to use VertexAI operators here: -.. 
exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_delete_model_operator] @@ -190,13 +190,12 @@ To obtain predictions from Google Cloud AutoML model you can use :class:`~airflow.providers.google.cloud.operators.automl.AutoMLBatchPredictOperator`. In the first case the model must be deployed. -.. exampleinclude:: /../../tests/system/providers/google/cloud/automl/example_automl_translation.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/automl/example_automl_translation.py :language: python :dedent: 4 :start-after: [START howto_operator_prediction] :end-before: [END howto_operator_prediction] - The :class:`~airflow.providers.google.cloud.operators.automl.AutoMLBatchPredictOperator` for tables, video intelligence, vision and natural language is deprecated and will be removed after 31.03.2024. Please use :class:`airflow.providers.google.cloud.operators.vertex_ai.batch_prediction_job.CreateBatchPredictionJobOperator`, @@ -206,19 +205,19 @@ video intelligence, vision and natural language is deprecated and will be remove instead. You can find examples of how to use VertexAI operators here: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_batch_prediction_job_operator] :end-before: [END how_to_cloud_vertex_ai_create_batch_prediction_job_operator] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_list_batch_prediction_job_operator] :end-before: [END how_to_cloud_vertex_ai_list_batch_prediction_job_operator] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_delete_batch_prediction_job_operator] @@ -239,7 +238,7 @@ and will be removed after 31.03.2024. Please use :class:`airflow.providers.google.cloud.operators.vertex_ai.dataset.ListDatasetsOperator` instead. You can find an example of how to use VertexAI operators here: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_list_dataset_operator] @@ -253,7 +252,7 @@ and will be removed after 31.03.2024. Please use :class:`airflow.providers.google.cloud.operators.vertex_ai.dataset.DeleteDatasetOperator` instead. You can find an example of how to use VertexAI operators here: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py +..
exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_delete_dataset_operator] diff --git a/docs/apache-airflow-providers-google/operators/cloud/bigquery.rst b/docs/apache-airflow-providers-google/operators/cloud/bigquery.rst index 1635372a35d7a..5e2c6689c2365 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/bigquery.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/bigquery.rst @@ -43,7 +43,7 @@ Create dataset To create an empty dataset in a BigQuery database you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyDatasetOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_dataset.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_dataset] @@ -59,7 +59,7 @@ To get the details of an existing dataset you can use This operator returns a `Dataset Resource `__. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_dataset.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_get_dataset] @@ -73,7 +73,7 @@ List tables in dataset To retrieve the list of tables in a given dataset use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryGetDatasetTablesOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_get_dataset_tables] @@ -90,7 +90,7 @@ To update a table in BigQuery you can use The update method replaces the entire Table resource, whereas the patch method only replaces fields that are provided in the submitted Table resource. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_update_table] @@ -107,7 +107,7 @@ To update a dataset in BigQuery you can use The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_dataset.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_update_dataset] @@ -121,7 +121,7 @@ Delete dataset To delete an existing dataset from a BigQuery database you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteDatasetOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_dataset.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_delete_dataset] @@ -144,7 +144,7 @@ ways. You may either directly pass the schema fields in, or you may point the operator to a Google Cloud Storage object name. 
The object in Google Cloud Storage must be a JSON file with the schema fields in it. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_table] @@ -152,7 +152,7 @@ Storage must be a JSON file with the schema fields in it. You can use this operator to create a view on top of an existing table. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_view] @@ -161,7 +161,7 @@ You can use this operator to create a view on top of an existing table. You can also use this operator to create a materialized view that periodically cache results of a query for increased performance and efficiency. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_materialized_view] @@ -180,7 +180,7 @@ Similarly to :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyTableOperator` you can directly pass the schema fields in. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_operations.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_operations.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_external_table] @@ -188,7 +188,7 @@ you can directly pass the schema fields in. Or you may point the operator to a Google Cloud Storage object name where the schema is stored. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_create_table_schema_json] @@ -213,7 +213,7 @@ nesting will a nested list where elements would represent the column values for that row. ``True`` - A Python list of dictionaries, where each dictionary represents a row. In each dictionary, the keys are the column names and the values are the corresponding values for those columns. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_get_data] @@ -224,7 +224,7 @@ The below example shows how to use in async (deferrable) mode. Note that a deferrable task requires the Triggerer to be running on your Airflow deployment. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_get_data_async] @@ -241,7 +241,7 @@ To upsert a table you can use This operator either updates the existing table or creates a new, empty table in the given dataset. -.. 
exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_upsert_table] @@ -258,7 +258,7 @@ To update the schema of a table you can use This operator updates the schema field values supplied, while leaving the rest unchanged. This is useful for instance to set new field descriptions on an existing table schema. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_update_table_schema] @@ -272,7 +272,7 @@ Delete table To delete an existing table you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteTableOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_delete_table] @@ -280,7 +280,7 @@ To delete an existing table you can use You can also use this operator to delete a view. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_delete_view] @@ -288,7 +288,7 @@ You can also use this operator to delete a view. You can also use this operator to delete a materialized view. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_delete_materialized_view] @@ -301,7 +301,7 @@ Execute BigQuery jobs Let's say you would like to execute the following query. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 0 :start-after: [START howto_operator_bigquery_query] @@ -311,7 +311,7 @@ To execute the SQL query in a specific BigQuery database you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryInsertJobOperator` with proper query job configuration that can be Jinja templated. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_insert_job] @@ -322,7 +322,7 @@ The below example shows how to use in async (deferrable) mode. Note that a deferrable task requires the Triggerer to be running on your Airflow deployment. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_insert_job_async] @@ -334,7 +334,7 @@ For more information on types of BigQuery job please check If you want to include some files in your configuration you can use ``include`` clause of Jinja template language as follow: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_select_job] @@ -350,7 +350,7 @@ then it will reattach to the existing job. Also for all this action you can use operator in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_insert_job_async] @@ -371,7 +371,7 @@ This operator expects a sql query that will return a single row. Each value on that first row is evaluated using python ``bool`` casting. If any of the values return ``False`` the check is failed and errors out. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_check] @@ -379,7 +379,7 @@ return ``False`` the check is failed and errors out. Also you can use deferrable mode in this operator -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_check_async] @@ -397,7 +397,7 @@ These operators expects a sql query that will return a single row. Each value on that first row is evaluated against ``pass_value`` which can be either a string or numeric value. If numeric, you can also specify ``tolerance``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_value_check] @@ -405,7 +405,7 @@ or numeric value. If numeric, you can also specify ``tolerance``. Also you can use deferrable mode in this operator -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_value_check_async] @@ -421,7 +421,7 @@ tolerance of the ones from ``days_back`` before you can either use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryIntervalCheckOperator` or :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryIntervalCheckAsyncOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 8 :start-after: [START howto_operator_bigquery_interval_check] @@ -429,7 +429,7 @@ tolerance of the ones from ``days_back`` before you can either use Also you can use deferrable mode in this operator -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_interval_check_async] @@ -443,7 +443,7 @@ Check columns with predefined tests To check that columns pass user-configurable tests you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryColumnCheckOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_column_check] @@ -457,7 +457,7 @@ Check table level data quality To check that tables pass user-defined tests you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryTableCheckOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_table_check] @@ -475,7 +475,7 @@ use the ``{{ ds_nodash }}`` macro as the table name suffix. :class:`~airflow.providers.google.cloud.sensors.bigquery.BigQueryTableExistenceSensor`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_bigquery_table] @@ -483,13 +483,13 @@ use the ``{{ ds_nodash }}`` macro as the table name suffix. Also you can use deferrable mode in this operator if you would like to free up the worker slots while the sensor is running. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_bigquery_table_defered] :end-before: [END howto_sensor_bigquery_table_defered] -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_async_bigquery_table] @@ -501,7 +501,7 @@ Check that a Table Partition exists To check that a table exists and has a partition you can use. :class:`~airflow.providers.google.cloud.sensors.bigquery.BigQueryTablePartitionExistenceSensor`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_bigquery_table_partition] @@ -511,13 +511,13 @@ For DAY partitioned tables, the partition_id parameter is a string on the "%Y%m% Also you can use deferrable mode in this operator if you would like to free up the worker slots while the sensor is running. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_bigquery_table_partition_defered] :end-before: [END howto_sensor_bigquery_table_partition_defered] -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py :language: python :dedent: 4 :start-after: [START howto_sensor_bigquery_table_partition_async] diff --git a/docs/apache-airflow-providers-google/operators/cloud/bigquery_dts.rst b/docs/apache-airflow-providers-google/operators/cloud/bigquery_dts.rst index 0d3663419baad..002716c8ef630 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/bigquery_dts.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/bigquery_dts.rst @@ -47,7 +47,7 @@ for example :class:`~airflow.providers.google.cloud.operators.bigquery_dts.BigQu scheduling option is present in passed configuration. If present then nothing is done, otherwise its value is set to ``True``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dts.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_dts.py :language: python :start-after: [START howto_bigquery_dts_create_args] :end-before: [END howto_bigquery_dts_create_args] @@ -55,7 +55,7 @@ set to ``True``. You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Basic usage of the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dts.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_dts.py :language: python :dedent: 4 :start-after: [START howto_bigquery_create_data_transfer] @@ -78,7 +78,7 @@ To delete DTS transfer configuration you can use Basic usage of the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dts.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_dts.py :language: python :dedent: 4 :start-after: [START howto_bigquery_delete_data_transfer] @@ -99,7 +99,7 @@ Start manual transfer runs to be executed now with schedule_time equal to curren Basic usage of the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dts.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_dts.py :language: python :dedent: 4 :start-after: [START howto_bigquery_start_transfer] @@ -112,7 +112,7 @@ parameters which allows you to dynamically determine values. To check if operation succeeded you can use :class:`~airflow.providers.google.cloud.sensors.bigquery_dts.BigQueryDataTransferServiceTransferRunSensor`. -.. 
exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_dts.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_dts.py :language: python :dedent: 4 :start-after: [START howto_bigquery_dts_sensor] diff --git a/docs/apache-airflow-providers-google/operators/cloud/bigtable.rst b/docs/apache-airflow-providers-google/operators/cloud/bigtable.rst index 870cfe50fce82..d77a689447244 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/bigtable.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/bigtable.rst @@ -42,7 +42,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigtable/example_bigtable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigtable/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_instance_create] @@ -65,7 +65,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigtable/example_bigtable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigtable/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_instance_update] @@ -85,7 +85,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigtable/example_bigtable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigtable/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_instance_delete] @@ -105,7 +105,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigtable/example_bigtable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigtable/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_cluster_update] @@ -129,7 +129,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigtable/example_bigtable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigtable/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_table_create] @@ -157,7 +157,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigtable/example_bigtable.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/bigtable/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_table_delete] @@ -182,7 +182,7 @@ timeout hits and does not raise any exception. Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigtable/example_bigtable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigtable/example_bigtable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_bigtable_table_wait_for_replication] diff --git a/docs/apache-airflow-providers-google/operators/cloud/cloud_batch.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_batch.rst index 2254ead25b1a3..c4ae3db7de395 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/cloud_batch.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_batch.rst @@ -35,7 +35,7 @@ For more information about the Job object fields, visit `Google Cloud Batch Job A simple job configuration can look as follows: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py :language: python :dedent: 0 :start-after: [START howto_operator_batch_job_creation] @@ -44,7 +44,7 @@ A simple job configuration can look as follows: With this configuration we can submit the job: :class:`~airflow.providers.google.cloud.operators.cloud_batch.CloudBatchSubmitJobOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py :language: python :dedent: 4 :start-after: [START howto_operator_batch_submit_job] @@ -53,7 +53,7 @@ With this configuration we can submit the job: or you can define the same operator in the deferrable mode: :class:`~airflow.providers.google.cloud.operators.cloud_batch.CloudBatchSubmitJobOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py :language: python :dedent: 4 :start-after: [START howto_operator_batch_submit_job_deferrable_mode] @@ -68,7 +68,7 @@ To list the tasks of a certain job, you can use: :class:`~airflow.providers.google.cloud.operators.cloud_batch.CloudBatchListTasksOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py :language: python :dedent: 4 :start-after: [START howto_operator_batch_list_tasks] @@ -83,7 +83,7 @@ To list the jobs, you can use: :class:`~airflow.providers.google.cloud.operators.cloud_batch.CloudBatchListJobsOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py :language: python :dedent: 4 :start-after: [START howto_operator_batch_list_jobs] @@ -98,7 +98,7 @@ To delete a job you can use: :class:`~airflow.providers.google.cloud.operators.cloud_batch.CloudBatchDeleteJobOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_job] diff --git a/docs/apache-airflow-providers-google/operators/cloud/cloud_build.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_build.rst index f7d91154095fc..e5e8e601b030c 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/cloud_build.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_build.rst @@ -46,7 +46,7 @@ Using the operator Cancel a build in progress with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildCancelBuildOperator` operator. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_cancel_build] @@ -74,7 +74,7 @@ Build configuration In order to trigger a build, it is necessary to pass the build configuration. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 0 :start-after: [START howto_operator_gcp_create_build_from_storage_body] @@ -82,7 +82,7 @@ In order to trigger a build, it is necessary to pass the build configuration. In addition, a build can refer to source stored in `Google Cloud Source Repositories `__. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 0 :start-after: [START howto_operator_create_build_from_repo_body] @@ -96,7 +96,7 @@ Using the operator Trigger a build is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildCreateBuildOperator` operator. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_create_build_from_storage] @@ -104,7 +104,7 @@ Trigger a build is performed with the You can use deferrable mode for this action in order to run the operator asynchronously: -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_create_build_from_storage_async] @@ -115,7 +115,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_create_build_from_storage_result] @@ -124,7 +124,7 @@ to be used by other operators. By default, after the build is created, it will wait for the build operation to complete. 
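For orientation, a minimal sketch of such a build submission (the project id, bucket name, archive object, and build step below are placeholder values, not taken from this patch):

.. code-block:: python

    from airflow.providers.google.cloud.operators.cloud_build import (
        CloudBuildCreateBuildOperator,
    )

    # Hypothetical build body: run a single "echo" step on sources taken
    # from an archive stored in a GCS bucket.
    create_build = CloudBuildCreateBuildOperator(
        task_id="create_build_from_storage",
        project_id="my-project",  # placeholder
        build={
            "source": {"storage_source": {"bucket": "my-bucket", "object_": "src.tar.gz"}},
            "steps": [{"name": "ubuntu", "args": ["echo", "Hello world"]}],
        },
    )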
If there is no need to wait for completion, you can pass wait=False as shown in the example below. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_create_build_without_wait] @@ -132,7 +132,7 @@ you can pass wait=False as shown in the example below. You can use deferrable mode for this action in order to run the operator asynchronously: -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_create_build_without_wait_async] @@ -141,7 +141,7 @@ You can use deferrable mode for this action in order to run the operator asynchr In order to start a build on Cloud Build you can use a build configuration file. A build config file defines the fields that are needed for Cloud Build to perform your tasks. You can write the build config file using the YAML or the JSON syntax. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_create_build_from_yaml_body] @@ -149,7 +149,7 @@ that are needed for Cloud Build to perform your tasks. You can write the build c You can use deferrable mode for this action in order to run the operator asynchronously: -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_create_build_from_yaml_body_async] @@ -158,7 +158,7 @@ You can use deferrable mode for this action in order to run the operator asynchr In addition, a Cloud Build can refer to source stored in `Google Cloud Source Repositories `__. Once the build has started, it will build the code in source repositories. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 0 :start-after: [START howto_operator_create_build_from_repo] @@ -166,7 +166,7 @@ Once the build has started, it will build the code in source repositories. You can use deferrable mode for this action in order to run the operator asynchronously: -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_create_build_from_repo_async] @@ -189,7 +189,7 @@ Using the operator Creates a new Cloud Build trigger with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildCreateBuildTriggerOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py :language: python :dedent: 4 :start-after: [START howto_operator_create_build_trigger] @@ -216,7 +216,7 @@ Using the operator Deletes a new Cloud Build trigger with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildDeleteBuildTriggerOperator` operator. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_build_trigger] @@ -243,7 +243,7 @@ Using the operator Returns information about a previously requested build with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildGetBuildOperator` operator. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_get_build] @@ -270,7 +270,7 @@ Using the operator Returns information about a Cloud Build trigger with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildGetBuildTriggerOperator` operator. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py :language: python :dedent: 4 :start-after: [START howto_operator_get_build_trigger] @@ -297,7 +297,7 @@ Using the operator Lists all the existing Cloud Build triggers with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildListBuildTriggersOperator` operator. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py :language: python :dedent: 4 :start-after: [START howto_operator_list_build_triggers] @@ -324,7 +324,7 @@ Using the operator Lists previously requested builds with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildListBuildsOperator` operator. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_list_builds] @@ -352,7 +352,7 @@ Using the operator Creates a new build based on the specified build with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildRetryBuildOperator` operator. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build.py :language: python :dedent: 4 :start-after: [START howto_operator_retry_build] @@ -379,7 +379,7 @@ Using the operator Runs a trigger at a particular source revision with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildRunBuildTriggerOperator` operator. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py +.. 
exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py :language: python :dedent: 4 :start-after: [START howto_operator_run_build_trigger] @@ -406,7 +406,7 @@ Using the operator Updates a Cloud Build trigger with the :class:`~airflow.providers.google.cloud.operators.cloud_build.CloudBuildUpdateBuildTriggerOperator` operator. -.. exampleinclude:: ../../../../tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py +.. exampleinclude:: ../../../../providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py :language: python :dedent: 4 :start-after: [START howto_operator_create_build_trigger] diff --git a/docs/apache-airflow-providers-google/operators/cloud/cloud_composer.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_composer.rst index f8f00fbe6c54a..7b05bbc6baa0a 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/cloud_composer.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_composer.rst @@ -39,7 +39,7 @@ For more information about the available fields to pass when creating a environm A simple environment configuration can look as follows: -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 0 :start-after: [START howto_operator_composer_simple_environment] @@ -48,7 +48,7 @@ A simple environment configuration can look as follows: With this configuration we can create the environment: :class:`~airflow.providers.google.cloud.operators.cloud_composer.CloudComposerCreateEnvironmentOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_create_composer_environment] @@ -57,7 +57,7 @@ With this configuration we can create the environment: or you can define the same operator in the deferrable mode: :class:`~airflow.providers.google.cloud.operators.cloud_composer.CloudComposerCreateEnvironmentOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_create_composer_environment_deferrable_mode] @@ -70,7 +70,7 @@ To get an environment you can use: :class:`~airflow.providers.google.cloud.operators.cloud_composer.CloudComposerGetEnvironmentOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_get_composer_environment] @@ -83,7 +83,7 @@ To get an environment you can use: :class:`~airflow.providers.google.cloud.operators.cloud_composer.CloudComposerListEnvironmentsOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_list_composer_environments] @@ -98,7 +98,7 @@ For more information on updateMask and other parameters take a look at `Cloud Co An example of a new service config and the updateMask: -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 0 :start-after: [START howto_operator_composer_update_environment] @@ -107,7 +107,7 @@ An example of a new service config and the updateMask: To update a service you can use: :class:`~airflow.providers.google.cloud.operators.cloud_composer.CloudComposerUpdateEnvironmentOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_update_composer_environment] @@ -116,7 +116,7 @@ To update a service you can use: or you can define the same operator in the deferrable mode: :class:`~airflow.providers.google.cloud.operators.cloud_composer.CloudComposerUpdateEnvironmentOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_update_composer_environment_deferrable_mode] @@ -129,7 +129,7 @@ To delete a service you can use: :class:`~airflow.providers.google.cloud.operators.cloud_composer.CloudComposerDeleteEnvironmentOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_composer_environment] @@ -138,7 +138,7 @@ To delete a service you can use: or you can define the same operator in the deferrable mode: :class:`~airflow.providers.google.cloud.operators.cloud_composer.CloudComposerDeleteEnvironmentOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_composer_environment_deferrable_mode] @@ -152,7 +152,7 @@ You can also list all supported Cloud Composer images: :class:`~airflow.providers.google.cloud.operators.cloud_composer.CloudComposerListImageVersionsOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_composer_image_list] @@ -164,7 +164,7 @@ Run Airflow CLI commands To run Airflow CLI commands in your environments, use: :class:`~airflow.providers.google.cloud.operators.cloud_composer.CloudComposerRunAirflowCLICommandOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_run_airflow_cli_command] @@ -172,7 +172,7 @@ To run Airflow CLI commands in your environments, use: or you can define the same operator in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_operator_run_airflow_cli_command_deferrable_mode] @@ -184,7 +184,7 @@ Check if a DAG run has completed You can use a sensor that checks if a DAG run has completed in your environments: :class:`~airflow.providers.google.cloud.sensors.cloud_composer.CloudComposerDAGRunSensor` -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_sensor_dag_run] @@ -192,7 +192,7 @@ You can use a sensor that checks if a DAG run has completed in your environments: or you can define the same sensor in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/composer/example_cloud_composer.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/composer/example_cloud_composer.py :language: python :dedent: 4 :start-after: [START howto_sensor_dag_run_deferrable_mode] diff --git a/docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore.rst index 7e7c04ba582d2..9adeea88513a3 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore.rst @@ -41,7 +41,7 @@ presented as a compatible dictionary also. Here is an example of an instance -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :start-after: [START howto_operator_instance] :end-before: [END howto_operator_instance] @@ -59,7 +59,7 @@ make use of the service account listed under ``persistenceIamIdentity``. You can use the :class:`~airflow.providers.google.cloud.operators.gcs.GCSBucketCreateAclEntryOperator` operator to set permissions. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_set_acl_permission] @@ -76,7 +76,7 @@ Create instance Creating an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreCreateInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_create_instance] @@ -87,7 +87,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values. 
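As a rough sketch (the location, instance id, and project id below are placeholders, not values from this patch), creating such an instance can look like this:

.. code-block:: python

    from airflow.providers.google.cloud.operators.cloud_memorystore import (
        CloudMemorystoreCreateInstanceOperator,
    )

    # Hypothetical Memorystore (Redis) instance, given as a plain dict;
    # a google.cloud.redis_v1.Instance object works as well.
    create_instance = CloudMemorystoreCreateInstanceOperator(
        task_id="create-instance",
        location="europe-north1",
        instance_id="my-redis-instance",
        instance={"tier": 1, "memory_size_gb": 1},
        project_id="my-project",
    )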
The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_create_instance_result] @@ -102,7 +102,7 @@ Delete instance Deleting an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreDeleteInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_instance] @@ -120,7 +120,7 @@ Export instance Exporting an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreExportInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_export_instance] @@ -138,7 +138,7 @@ Failover instance Failing over an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreFailoverInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_failover_instance] @@ -156,7 +156,7 @@ Get instance Getting an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreGetInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_get_instance] @@ -174,7 +174,7 @@ Import instance Importing an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreImportOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_import_instance] @@ -192,7 +192,7 @@ List instances Listing instances is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreListInstancesOperator` operator. -.. 
exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_list_instances] @@ -203,7 +203,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_list_instances_result] @@ -217,7 +217,7 @@ Update instance Updating an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreUpdateInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_update_instance] @@ -236,7 +236,7 @@ Scale instance Scaling an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreScaleInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py :language: python :dedent: 4 :start-after: [START howto_operator_scale_instance] diff --git a/docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore_memcached.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore_memcached.rst index 2fe980be0215f..6f52020c283be 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore_memcached.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore_memcached.rst @@ -41,7 +41,7 @@ The object can be presented as a compatible dictionary also. Here is an example of an instance -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py :language: python :start-after: [START howto_operator_memcached_instance] :end-before: [END howto_operator_memcached_instance] @@ -56,7 +56,7 @@ Creating an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedCreateInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py :language: python :dedent: 4 :start-after: [START howto_operator_create_instance_memcached] @@ -72,7 +72,7 @@ Deleting an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedDeleteInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_instance_memcached] @@ -88,7 +88,7 @@ Get an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedGetInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py :language: python :dedent: 4 :start-after: [START howto_operator_get_instance_memcached] @@ -104,7 +104,7 @@ List instances is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedListInstancesOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py :language: python :dedent: 4 :start-after: [START howto_operator_list_instances_memcached] @@ -120,7 +120,7 @@ Updating an instance is performed with the :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedUpdateInstanceOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py :language: python :dedent: 4 :start-after: [START howto_operator_update_instance_memcached] @@ -138,7 +138,7 @@ and :class:`~airflow.providers.google.cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedApplyParametersOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py :language: python :dedent: 4 :start-after: [START howto_operator_update_and_apply_parameters_memcached] diff --git a/docs/apache-airflow-providers-google/operators/cloud/cloud_run.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_run.rst index 95a3b3a676679..65a28ea4f42c0 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/cloud_run.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_run.rst @@ -35,7 +35,7 @@ For more information about the Job object fields, visit `Google Cloud Run Job de A simple job configuration can be created with a Job object: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run.py :language: python :dedent: 0 :start-after: [START howto_cloud_run_job_instance_creation] @@ -43,7 +43,7 @@ A simple job configuration can be created with a Job object: or with a Python dictionary: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run.py :language: python :dedent: 0 :start-after: [START howto_cloud_run_job_dict_creation] @@ -52,7 +52,7 @@ or with a Python dictionary: You can create a Cloud Run Job with any of these configurations : :class:`~airflow.providers.google.cloud.operators.cloud_run.CloudRunCreateJobOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run.py :language: python :dedent: 4 :start-after: [START howto_operator_cloud_run_create_job] @@ -69,7 +69,7 @@ For more information about the Service object fields, visit `Google Cloud Run Se A simple service configuration can look as follows: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run_service.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run_service.py :language: python :dedent: 0 :start-after: [START howto_operator_cloud_run_service_creation] @@ -79,7 +79,7 @@ A simple service configuration can look as follows: With this configuration we can create the service: :class:`~airflow.providers.google.cloud.operators.cloud_run.CloudRunCreateServiceOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run_service.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run_service.py :language: python :dedent: 4 :start-after: [START howto_operator_cloud_run_create_service] @@ -94,7 +94,7 @@ Delete a service With this configuration we can delete the service: :class:`~airflow.providers.google.cloud.operators.cloud_run.CloudRunDeleteServiceOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run_service.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run_service.py :language: python :dedent: 4 :start-after: [START howto_operator_cloud_run_delete_service] @@ -110,7 +110,7 @@ To execute a job, you can use: :class:`~airflow.providers.google.cloud.operators.cloud_run.CloudRunExecuteJobOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run.py :language: python :dedent: 4 :start-after: [START howto_operator_cloud_run_execute_job] @@ -120,7 +120,7 @@ or you can define the same operator in the deferrable mode: :class:`~airflow.providers.google.cloud.operators.cloud_run.CloudRunExecuteJobOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run.py :language: python :dedent: 4 :start-after: [START howto_operator_cloud_run_execute_job_deferrable_mode] @@ -130,7 +130,7 @@ You can also specify overrides that allow you to give a new entrypoint command t :class:`~airflow.providers.google.cloud.operators.cloud_run.CloudRunExecuteJobOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run.py :language: python :dedent: 4 :start-after: [START howto_operator_cloud_run_execute_job_with_overrides] @@ -144,7 +144,7 @@ To update a job, you can use: :class:`~airflow.providers.google.cloud.operators.cloud_run.CloudRunUpdateJobOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run.py :language: python :dedent: 4 :start-after: [START howto_operator_cloud_update_job] @@ -161,7 +161,7 @@ To list the jobs, you can use: :class:`~airflow.providers.google.cloud.operators.cloud_run.CloudRunListJobsOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run.py :language: python :dedent: 4 :start-after: [START howto_operator_cloud_run_list_jobs] @@ -177,7 +177,7 @@ To delete a job you can use: :class:`~airflow.providers.google.cloud.operators.cloud_run.CloudRunDeleteJobOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_run/example_cloud_run.py :language: python :dedent: 4 :start-after: [START howto_operator_cloud_delete_job] diff --git a/docs/apache-airflow-providers-google/operators/cloud/cloud_sql.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_sql.rst index 42a32712867d9..e1b572046d609 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/cloud_sql.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_sql.rst @@ -42,7 +42,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_db_create] @@ -50,7 +50,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Example request body: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_db_create_body] :end-before: [END howto_operator_cloudsql_db_create_body] @@ -58,7 +58,7 @@ Example request body: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_db_create_template_fields] @@ -87,7 +87,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_db_delete] @@ -96,7 +96,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_db_delete_template_fields] @@ -127,7 +127,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_db_patch] @@ -135,7 +135,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Example request body: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_db_patch_body] :end-before: [END howto_operator_cloudsql_db_patch_body] @@ -143,7 +143,7 @@ Example request body: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_db_patch_template_fields] @@ -174,7 +174,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_delete] @@ -183,7 +183,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_delete_template_fields] @@ -215,7 +215,7 @@ Arguments Example body defining the export operation: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_export_body] :end-before: [END howto_operator_cloudsql_export_body] @@ -226,7 +226,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_export] @@ -234,7 +234,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Also for all this action you can use operator in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_export_async] @@ -243,7 +243,7 @@ Also for all this action you can use operator in the deferrable mode: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_export_template_fields] @@ -268,7 +268,7 @@ To grant the service account with the appropriate WRITE permissions for the GCS you can use the :class:`~airflow.providers.google.cloud.operators.gcs.GCSBucketCreateAclEntryOperator`, as shown in the example: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_export_gcs_permissions] @@ -308,7 +308,7 @@ Arguments Example body defining the import operation: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_import_body] :end-before: [END howto_operator_cloudsql_import_body] @@ -319,7 +319,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_import] @@ -328,7 +328,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_import_template_fields] @@ -353,7 +353,7 @@ To grant the service account with the appropriate READ permissions for the GCS o you can use the :class:`~airflow.providers.google.cloud.operators.gcs.GCSBucketCreateAclEntryOperator`, as shown in the example: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_import_gcs_permissions] @@ -379,7 +379,7 @@ Arguments Example body defining the instance with failover replica: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_create_body] :end-before: [END howto_operator_cloudsql_create_body] @@ -390,7 +390,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_create] @@ -399,7 +399,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_create_template_fields] @@ -430,7 +430,7 @@ Arguments Example body defining the instance: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :start-after: [START howto_operator_cloudsql_patch_body] :end-before: [END howto_operator_cloudsql_patch_body] @@ -441,7 +441,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_patch] @@ -450,7 +450,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_patch_template_fields] @@ -484,7 +484,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_cloudsql_clone] @@ -493,7 +493,7 @@ Cloud connection used. Both variants are shown: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_clone_template_fields] @@ -558,14 +558,14 @@ NFS-like volumes in the same path for all the workers. Example connection definitions for all non-SSL connectivity. Note that all the components of the connection URI should be URL-encoded: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query.py :language: python :start-after: [START howto_operator_cloudsql_query_connections] :end-before: [END howto_operator_cloudsql_query_connections] Similar connection definition for all SSL-enabled connectivity: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py :language: python :start-after: [START howto_operator_cloudsql_query_connections] :end-before: [END howto_operator_cloudsql_query_connections] @@ -574,12 +574,12 @@ It is also possible to configure a connection via environment variable (note tha matches the :envvar:`AIRFLOW_CONN_{CONN_ID}` postfix uppercase if you are using a standard AIRFLOW notation for defining connection via environment variables): -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query.py :language: python :start-after: [START howto_operator_cloudsql_query_connections_env] :end-before: [END howto_operator_cloudsql_query_connections_env] -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py :language: python :start-after: [START howto_operator_cloudsql_query_connections_env] :end-before: [END howto_operator_cloudsql_query_connections_env] @@ -593,7 +593,7 @@ or the connection configured via environment variable (note that the connection :envvar:`AIRFLOW_CONN_{CONN_ID}` postfix uppercase if you are using a standard AIRFLOW notation for defining connection via environment variables): -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query.py :language: python :start-after: [START howto_operator_cloudsql_query_operators] :end-before: [END howto_operator_cloudsql_query_operators] @@ -603,7 +603,7 @@ will be overridden. One of the ways to do so is specifying paths to each certifi Note that these files will be copied into a temporary location with minimal required permissions for security reasons. -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py :language: python :start-after: [START howto_operator_cloudsql_query_operators_ssl] :end-before: [END howto_operator_cloudsql_query_operators_ssl] @@ -614,7 +614,7 @@ format is: {"sslcert": "", "sslkey": "", "sslrootcert": ""} -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py :language: python :start-after: [START howto_operator_cloudsql_query_operators_ssl_secret_id] :end-before: [END howto_operator_cloudsql_query_operators_ssl_secret_id] @@ -622,7 +622,7 @@ format is: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_sql.py +.. 
literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_sql.py :language: python :dedent: 4 :start-after: [START gcp_sql_query_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/cloud_storage_transfer_service.rst b/docs/apache-airflow-providers-google/operators/cloud/cloud_storage_transfer_service.rst index df3365631f1ce..4779ce0dfcd91 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/cloud_storage_transfer_service.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/cloud_storage_transfer_service.rst @@ -62,17 +62,17 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py :language: python :start-after: [START howto_operator_gcp_transfer_create_job_body_gcp] :end-before: [END howto_operator_gcp_transfer_create_job_body_gcp] -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py :language: python :start-after: [START howto_operator_gcp_transfer_create_job_body_aws] :end-before: [END howto_operator_gcp_transfer_create_job_body_aws] -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_create_job] @@ -81,7 +81,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_job_create_template_fields] @@ -107,7 +107,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_delete_job] @@ -116,7 +116,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_job_delete_template_fields] @@ -142,7 +142,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_run_job] @@ -151,7 +151,7 @@ Using the operator Templating """""""""" -.. 
literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_job_run_template_fields] @@ -177,12 +177,12 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py :language: python :start-after: [START howto_operator_gcp_transfer_update_job_body] :end-before: [END howto_operator_gcp_transfer_update_job_body] -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_update_job] @@ -191,7 +191,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_job_update_template_fields] @@ -216,7 +216,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_cancel_operation] @@ -225,7 +225,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_operation_cancel_template_fields] @@ -252,7 +252,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_get_operation] @@ -261,7 +261,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_operation_get_template_fields] @@ -287,7 +287,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_list_operations] @@ -296,7 +296,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_operations_list_template_fields] @@ -321,7 +321,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_pause_operation] @@ -330,7 +330,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_operation_pause_template_fields] @@ -355,7 +355,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_resume_operation] @@ -364,7 +364,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_operation_resume_template_fields] @@ -390,7 +390,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_transfer_wait_operation] @@ -399,7 +399,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_job_sensor_template_fields] @@ -418,7 +418,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_transfer_gcs_to_gcs] @@ -427,7 +427,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py :language: python :dedent: 4 :start-after: [START gcp_transfer_gcs_to_gcs_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/compute.rst b/docs/apache-airflow-providers-google/operators/cloud/compute.rst index 1628db48e1364..df10dbc479c5e 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/compute.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/compute.rst @@ -39,7 +39,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_insert] @@ -48,7 +48,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection id used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_insert_no_project_id] @@ -58,7 +58,7 @@ from the Google Cloud connection id used: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_insert_fields] @@ -84,7 +84,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_insert_from_template] @@ -93,7 +93,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection id used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_insert_from_template_no_project_id] @@ -103,7 +103,7 @@ from the Google Cloud connection id used: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_insert_from_template_fields] @@ -130,7 +130,7 @@ Using the operator You can create the operator without project id - project id will be retrieved from the Google Cloud connection id used. The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_delete_no_project_id] @@ -140,7 +140,7 @@ from the Google Cloud connection id used. The code to create the operator: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_delete_template_fields] @@ -166,7 +166,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_start] @@ -175,7 +175,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection id used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_start_no_project_id] @@ -185,7 +185,7 @@ from the Google Cloud connection id used: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_start_template_fields] @@ -212,7 +212,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_stop] @@ -221,7 +221,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_stop_no_project_id] @@ -230,7 +230,7 @@ from the Google Cloud connection used: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_stop_template_fields] @@ -262,7 +262,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_set_machine_type] @@ -271,7 +271,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_set_machine_type_no_project_id] @@ -280,7 +280,7 @@ from the Google Cloud connection used: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_set_machine_type_template_fields] @@ -310,7 +310,7 @@ The code to create the operator: You can create the operator without project id - project id will be retrieved from the Google Cloud connection used. The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_igm.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_delete_old_template_no_project_id] @@ -319,7 +319,7 @@ from the Google Cloud connection used. The code to create the operator: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_template_delete_fields] @@ -346,7 +346,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_igm.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_igm_insert_template] @@ -355,7 +355,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_igm.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_igm_insert_template_no_project_id] @@ -364,7 +364,7 @@ from the Google Cloud connection used: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_template_insert_fields] @@ -392,7 +392,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_igm.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_igm_copy_template] @@ -401,7 +401,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_igm.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_igm_copy_template_no_project_id] @@ -410,7 +410,7 @@ from the Google Cloud connection used: Templating """""""""" -.. 
literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_instance_template_copy_operator_template_fields] @@ -441,7 +441,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_igm.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_igm.py :language: python :start-after: [START howto_operator_gce_insert_igm] :end-before: [END howto_operator_gce_insert_igm] @@ -449,7 +449,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_igm.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_insert_igm_no_project_id] @@ -459,7 +459,7 @@ from the Google Cloud connection used: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_igm_insert_fields] @@ -491,7 +491,7 @@ Using the operator You can create the operator without project id - project id will be retrieved from the Google Cloud connection used. The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_igm.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_delete_igm_no_project_id] @@ -501,7 +501,7 @@ from the Google Cloud connection used. The code to create the operator: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_igm_delete_fields] @@ -532,7 +532,7 @@ Using the operator The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_igm.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_igm_update_template] @@ -541,7 +541,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_igm.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_igm.py :language: python :dedent: 4 :start-after: [START howto_operator_gce_igm_update_template_no_project_id] @@ -551,7 +551,7 @@ from the Google Cloud connection used: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/compute.py +.. 
literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/compute.py :language: python :dedent: 4 :start-after: [START gce_igm_update_template_operator_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/compute_ssh.rst b/docs/apache-airflow-providers-google/operators/cloud/compute_ssh.rst index 8e50f01566aff..b4e5c10bbf186 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/compute_ssh.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/compute_ssh.rst @@ -45,7 +45,7 @@ Please note that the target instance must allow tcp traffic on port 22. Below is the code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_ssh.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_ssh.py :language: python :dedent: 4 :start-after: [START howto_execute_command_on_remote_1] @@ -54,7 +54,7 @@ Below is the code to create the operator: You can also create the hook without project id - project id will be retrieved from the Google credentials used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/compute/example_compute_ssh.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/compute/example_compute_ssh.py :language: python :dedent: 4 :start-after: [START howto_execute_command_on_remote_2] diff --git a/docs/apache-airflow-providers-google/operators/cloud/data_loss_prevention.rst b/docs/apache-airflow-providers-google/operators/cloud/data_loss_prevention.rst index 76ac1abbd1d06..2147e9f1ab9ab 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/data_loss_prevention.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/data_loss_prevention.rst @@ -37,7 +37,7 @@ Create Stored Info-Type To create a custom info-type you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPCreateStoredInfoTypeOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_info_types.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_info_types.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_create_info_type] @@ -70,7 +70,7 @@ Update Stored Info-Type To update a info-type you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPUpdateStoredInfoTypeOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_info_types.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_info_types.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_update_info_type] @@ -85,7 +85,7 @@ Deleting Stored Info-Type To delete a info-type you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPDeleteStoredInfoTypeOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_info_types.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_info_types.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_delete_info_type] @@ -112,7 +112,7 @@ Creating Template To create a inspection template you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPCreateInspectTemplateOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_inspect_template.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_inspect_template.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_create_inspect_template] @@ -139,7 +139,7 @@ Using Template To find potentially sensitive info using the inspection template we just created, we can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPInspectContentOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_inspect_template.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_inspect_template.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_use_inspect_template] @@ -161,7 +161,7 @@ Deleting Template To delete the template you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPDeleteInspectTemplateOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_inspect_template.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_inspect_template.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_delete_inspect_template] @@ -236,7 +236,7 @@ Creating Job Trigger To create a job trigger you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPCreateJobTriggerOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_job_trigger.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job_trigger.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_create_job_trigger] @@ -261,7 +261,7 @@ Updating Job Trigger To update a job trigger you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPUpdateJobTriggerOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_job_trigger.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job_trigger.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_update_job_trigger] @@ -275,7 +275,7 @@ Deleting Job Trigger To delete a job trigger you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPDeleteJobTriggerOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_job_trigger.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job_trigger.py :language: python :dedent: 4 :start-after: [START howto_operator_dlp_delete_job_trigger] @@ -297,7 +297,7 @@ Configuration information defines how you want the sensitive data de-identified. This config can either be saved and persisted in de-identification templates or defined in a :class:`~google.cloud.dlp_v2.types.DeidentifyConfig` object: -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py :language: python :start-after: [START dlp_deidentify_config_example] :end-before: [END dlp_deidentify_config_example] @@ -305,7 +305,7 @@ This config can either be saved and persisted in de-identification templates or To de-identify potentially sensitive information from a content item, you can use :class:`~airflow.providers.google.cloud.operators.cloud.dlp.CloudDLPDeidentifyContentOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py :language: python :dedent: 4 :start-after: [START _howto_operator_dlp_deidentify_content] diff --git a/docs/apache-airflow-providers-google/operators/cloud/datacatalog.rst b/docs/apache-airflow-providers-google/operators/cloud/datacatalog.rst index eaad69735e45d..10383b62ba25d 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/datacatalog.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/datacatalog.rst @@ -59,7 +59,7 @@ operators. The ``CloudDataCatalogGetEntryOperator`` use Project ID, Entry Group ID, Entry ID to get the entry. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_get_entry] @@ -71,7 +71,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_get_entry_result] @@ -79,7 +79,7 @@ The result is saved to :ref:`XCom `, which allows it to be used b The ``CloudDataCatalogLookupEntryOperator`` use the resource name to get the entry. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_lookup_entry_linked_resource] @@ -91,7 +91,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_lookup_entry_result] @@ -105,7 +105,7 @@ Creating an entry The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateEntryOperator` operator create the entry. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. 
exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_entry_gcs] @@ -119,7 +119,7 @@ The result is saved to :ref:`XCom `, which allows it to be used b The newly created entry ID can be read with the ``entry_id`` key. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_entry_gcs_result] @@ -133,7 +133,7 @@ Updating an entry The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateEntryOperator` operator update the entry. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_update_entry] @@ -151,7 +151,7 @@ Deleting a entry The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteEntryOperator` operator delete the entry. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_delete_entry] @@ -180,7 +180,7 @@ Creating an entry group The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateEntryGroupOperator` operator create the entry group. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_entry_group] @@ -194,7 +194,7 @@ The result is saved to :ref:`XCom `, which allows it to be used b The newly created entry group ID can be read with the ``entry_group_id`` key. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_entry_group_result] @@ -208,7 +208,7 @@ Getting an entry group The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogGetEntryGroupOperator` operator get the entry group. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_get_entry_group] @@ -220,7 +220,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. 
exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_get_entry_group_result] @@ -234,7 +234,7 @@ Deleting an entry group The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteEntryGroupOperator` operator delete the entry group. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_entries.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_entries.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_delete_entry_group] @@ -263,7 +263,7 @@ Creating a tag template The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateTagTemplateOperator` operator get the tag template. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_tag_template] @@ -277,7 +277,7 @@ The result is saved to :ref:`XCom `, which allows it to be used b The newly created tag template ID can be read with the ``tag_template_id`` key. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_tag_template_result] @@ -291,7 +291,7 @@ Deleting a tag template The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteTagTemplateOperator` operator delete the tag template. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_delete_tag_template] @@ -310,7 +310,7 @@ Getting a tag template The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogGetTagTemplateOperator` operator get the tag template. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_get_tag_template] @@ -322,7 +322,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_get_tag_template_result] @@ -336,7 +336,7 @@ Updating a tag template The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateTagTemplateOperator` operator update the tag template. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. 
exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_update_tag_template] @@ -365,7 +365,7 @@ Creating a tag on an entry The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateTagOperator` operator get the tag template. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tags.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tags.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_tag] @@ -379,7 +379,7 @@ The result is saved to :ref:`XCom `, which allows it to be used b The newly created tag ID can be read with the ``tag_id`` key. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tags.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tags.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_tag_result] @@ -393,7 +393,7 @@ Updating a tag The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateTagOperator` operator update the tag template. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tags.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tags.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_update_tag] @@ -411,7 +411,7 @@ Deleting a tag The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteTagOperator` operator delete the tag template. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tags.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tags.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_delete_tag] @@ -429,7 +429,7 @@ Listing tags on an entry The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogListTagsOperator` operator get list of the tags on the entry. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tags.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tags.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_list_tags] @@ -441,7 +441,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tags.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tags.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_list_tags_result] @@ -467,7 +467,7 @@ Creating a field The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogCreateTagTemplateFieldOperator` operator get the tag template field. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. 
exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_tag_template_field] @@ -481,7 +481,7 @@ The result is saved to :ref:`XCom `, which allows it to be used b The newly created field ID can be read with the ``tag_template_field_id`` key. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_create_tag_template_field_result] @@ -495,7 +495,7 @@ Renaming a field The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogRenameTagTemplateFieldOperator` operator rename the tag template field. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_rename_tag_template_field] @@ -513,7 +513,7 @@ Updating a field The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogUpdateTagTemplateFieldOperator` operator get the tag template field. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_update_tag_template_field] @@ -532,7 +532,7 @@ Deleting a field The :class:`~airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogDeleteTagTemplateFieldOperator` operator delete the tag template field. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_delete_tag_template_field] @@ -553,7 +553,7 @@ operator searches Data Catalog for multiple resources like entries, tags that ma The ``query`` parameters should defined using `search syntax `__. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_search_catalog.py +.. exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_search_catalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_search_catalog] @@ -565,7 +565,7 @@ parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/datacatalog/example_datacatalog_search_catalog.py +.. 
exampleinclude:: /../../providers/tests/system/google/datacatalog/example_datacatalog_search_catalog.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_datacatalog_search_catalog_result] diff --git a/docs/apache-airflow-providers-google/operators/cloud/dataflow.rst b/docs/apache-airflow-providers-google/operators/cloud/dataflow.rst index a9eb98ea9a509..21895e4e6e679 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/dataflow.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/dataflow.rst @@ -78,7 +78,7 @@ This will create a new pipeline that will be visible on Dataflow Pipelines UI. Here is an example of how you can create a Dataflow Pipeline by running DataflowCreatePipelineOperator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_pipeline.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_pipeline.py :language: python :dedent: 4 :start-after: [START howto_operator_create_dataflow_pipeline] @@ -87,7 +87,7 @@ Here is an example of how you can create a Dataflow Pipeline by running Dataflow To run a newly created pipeline you can use :class:`~airflow.providers.google.cloud.operators.dataflow.DataflowRunPipelineOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_pipeline.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_pipeline.py :language: python :dedent: 4 :start-after: [START howto_operator_run_dataflow_pipeline] @@ -119,7 +119,7 @@ has the ability to download or available on the local filesystem (provide the ab Here is an example of creating and running a pipeline in Java with jar stored on GCS: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_native_java.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_native_java.py :language: python :dedent: 4 :start-after: [START howto_operator_start_java_job_jar_on_gcs] @@ -127,7 +127,7 @@ Here is an example of creating and running a pipeline in Java with jar stored on Here is an example of creating and running a pipeline in Java with jar stored on GCS in deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_native_java.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_native_java.py :language: python :dedent: 4 :start-after: [START howto_operator_start_java_job_jar_on_gcs_deferrable] @@ -135,7 +135,7 @@ Here is an example of creating and running a pipeline in Java with jar stored on Here is an example of creating and running a pipeline in Java with jar stored on local file system: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_native_java.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_native_java.py :language: python :dedent: 4 :start-after: [START howto_operator_start_java_job_local_jar] @@ -162,7 +162,7 @@ The ``py_system_site_packages`` argument specifies whether or not all the Python will be accessible within virtual environment (if ``py_requirements`` argument is specified), recommend avoiding unless the Dataflow job requires it. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_native_python.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_native_python.py :language: python :dedent: 4 :start-after: [START howto_operator_start_python_job] @@ -197,7 +197,7 @@ or Python file) and how it is written. In order for the Dataflow job to execute pipeline objects are not being waited upon (not calling ``waitUntilFinish`` or ``wait_until_finish`` on the ``PipelineResult`` in your application code). -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_native_python_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py :language: python :dedent: 4 :start-after: [START howto_operator_start_python_job_async] @@ -221,7 +221,7 @@ Streaming execution To execute a streaming Dataflow job, ensure the streaming option is set (for Python) or read from an unbounded data source, such as Pub/Sub, in your pipeline (for Java). -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_streaming_python.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_streaming_python.py :language: python :dedent: 4 :start-after: [START howto_operator_start_streaming_python_job] @@ -249,7 +249,7 @@ See the `official documentation for Dataflow templates Here is an example of running a Dataflow job using a Classic Template with :class:`~airflow.providers.google.cloud.operators.dataflow.DataflowTemplatedJobStartOperator`: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_template.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_template.py :language: python :dedent: 4 :start-after: [START howto_operator_start_template_job] @@ -257,7 +257,7 @@ Here is an example of running a Dataflow job using a Classic Template with Also for this action you can use the operator in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_template.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_template.py :language: python :dedent: 4 :start-after: [START howto_operator_start_template_job_deferrable] @@ -269,7 +269,7 @@ See the `list of Google-provided templates that can be used with this operator Here is an example of running a Dataflow job using a Flex Template with :class:`~airflow.providers.google.cloud.operators.dataflow.DataflowStartFlexTemplateOperator`: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_template.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_template.py :language: python :dedent: 4 :start-after: [START howto_operator_start_flex_template_job] @@ -277,7 +277,7 @@ Here is an example of running a Dataflow job using a Flex Template with Also for this action you can use the operator in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_template.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_template.py :language: python :dedent: 4 :start-after: [START howto_operator_start_flex_template_job_deferrable] @@ -293,7 +293,7 @@ extensions for running Dataflow streaming jobs. Here is an example of running Dataflow SQL job with :class:`~airflow.providers.google.cloud.operators.dataflow.DataflowStartSqlJobOperator`: -.. 
exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_sql.py :language: python :dedent: 4 :start-after: [START howto_operator_start_sql_job] @@ -317,7 +317,7 @@ This API can be used to define both streaming and batch pipelines. Here is an example of running Dataflow YAML job with :class:`~airflow.providers.google.cloud.operators.dataflow.DataflowStartYamlJobOperator`: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_yaml.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_yaml.py :language: python :dedent: 4 :start-after: [START howto_operator_dataflow_start_yaml_job] @@ -325,7 +325,7 @@ Here is an example of running Dataflow YAML job with This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_yaml.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_yaml.py :language: python :dedent: 4 :start-after: [START howto_operator_dataflow_start_yaml_job_def] @@ -347,7 +347,7 @@ To stop one or more Dataflow pipelines you can use Streaming pipelines are drained by default, setting ``drain_pipeline`` to ``False`` will cancel them instead. Provide ``job_id`` to stop a specific job, or ``job_name_prefix`` to stop all jobs with provided name prefix. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_native_python.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_native_python.py :language: python :dedent: 4 :start-after: [START howto_operator_stop_dataflow_job] @@ -364,7 +364,7 @@ To delete a Dataflow pipeline you can use :class:`~airflow.providers.google.cloud.operators.dataflow.DataflowDeletePipelineOperator`. Here is an example how you can use this operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_pipeline.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_pipeline.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_dataflow_pipeline] @@ -382,7 +382,7 @@ When job is triggered asynchronously sensors may be used to run checks for speci :class:`~airflow.providers.google.cloud.sensors.dataflow.DataflowJobStatusSensor`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_native_python_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py :language: python :dedent: 4 :start-after: [START howto_sensor_wait_for_job_status] @@ -390,7 +390,7 @@ When job is triggered asynchronously sensors may be used to run checks for speci This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_sensors_deferrable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py :language: python :dedent: 4 :start-after: [START howto_sensor_wait_for_job_status_deferrable] @@ -398,7 +398,7 @@ This operator can be run in deferrable mode by passing ``deferrable=True`` as a :class:`~airflow.providers.google.cloud.sensors.dataflow.DataflowJobMetricsSensor`. -.. 
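Waiting on an asynchronously started job typically pairs the start task's XCom with the status sensor. A sketch, assuming the upstream task pushes the Dataflow job id under a ``dataflow_job_id`` key:

.. code-block:: python

    from airflow.providers.google.cloud.hooks.dataflow import DataflowJobStatus
    from airflow.providers.google.cloud.sensors.dataflow import DataflowJobStatusSensor

    wait_for_job = DataflowJobStatusSensor(
        task_id="wait_for_dataflow_job",
        # Assumes the upstream start task pushed the job id to XCom.
        job_id="{{ task_instance.xcom_pull('start_python_job_async')['dataflow_job_id'] }}",
        expected_statuses={DataflowJobStatus.JOB_STATE_DONE},
        project_id="my-project",  # placeholder
        location="us-central1",
    )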
exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_native_python_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py :language: python :dedent: 4 :start-after: [START howto_sensor_wait_for_job_metric] @@ -406,7 +406,7 @@ This operator can be run in deferrable mode by passing ``deferrable=True`` as a This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_sensors_deferrable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py :language: python :dedent: 4 :start-after: [START howto_sensor_wait_for_job_metric_deferrable] @@ -414,7 +414,7 @@ This operator can be run in deferrable mode by passing ``deferrable=True`` as a :class:`~airflow.providers.google.cloud.sensors.dataflow.DataflowJobMessagesSensor`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_native_python_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py :language: python :dedent: 4 :start-after: [START howto_sensor_wait_for_job_message] @@ -422,7 +422,7 @@ This operator can be run in deferrable mode by passing ``deferrable=True`` as a This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_sensors_deferrable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py :language: python :dedent: 4 :start-after: [START howto_sensor_wait_for_job_message_deferrable] @@ -430,7 +430,7 @@ This operator can be run in deferrable mode by passing ``deferrable=True`` as a :class:`~airflow.providers.google.cloud.sensors.dataflow.DataflowJobAutoScalingEventsSensor`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_native_python_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py :language: python :dedent: 4 :start-after: [START howto_sensor_wait_for_job_autoscaling_event] @@ -438,7 +438,7 @@ This operator can be run in deferrable mode by passing ``deferrable=True`` as a This operator can be run in deferrable mode by passing ``deferrable=True`` as a parameter. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataflow/example_dataflow_sensors_deferrable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py :language: python :dedent: 4 :start-after: [START howto_sensor_wait_for_job_autoscaling_event_deferrable] diff --git a/docs/apache-airflow-providers-google/operators/cloud/dataform.rst b/docs/apache-airflow-providers-google/operators/cloud/dataform.rst index 97d9d28e223e0..09d8a6e6b8f93 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/dataform.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/dataform.rst @@ -40,7 +40,7 @@ Create repository for tracking your code in Dataform service. Example of usage c :class:`~airflow.providers.google.cloud.operators.dataform.DataformCreateRepositoryOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. 
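The repository, workspace, and compilation-result chain can be sketched end to end as below; all IDs are placeholders, and the ``compilation_result`` body assumes a ``main`` branch:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dataform import (
        DataformCreateCompilationResultOperator,
        DataformCreateRepositoryOperator,
        DataformCreateWorkspaceOperator,
    )

    make_repository = DataformCreateRepositoryOperator(
        task_id="make_repository",
        project_id="my-project",
        region="us-central1",
        repository_id="my-repository",
    )

    make_workspace = DataformCreateWorkspaceOperator(
        task_id="make_workspace",
        project_id="my-project",
        region="us-central1",
        repository_id="my-repository",
        workspace_id="my-workspace",
    )

    create_compilation_result = DataformCreateCompilationResultOperator(
        task_id="create_compilation_result",
        project_id="my-project",
        region="us-central1",
        repository_id="my-repository",
        compilation_result={
            "git_commitish": "main",
            "workspace": (
                "projects/my-project/locations/us-central1"
                "/repositories/my-repository/workspaces/my-workspace"
            ),
        },
    )

    make_repository >> make_workspace >> create_compilation_result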
exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 0 :start-after: [START howto_operator_create_repository] @@ -52,7 +52,7 @@ Create workspace for storing your code in Dataform service. Example of usage can :class:`~airflow.providers.google.cloud.operators.dataform.DataformCreateWorkspaceOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 0 :start-after: [START howto_operator_create_workspace] @@ -65,7 +65,7 @@ A simple configuration to create Compilation Result can look as followed: :class:`~airflow.providers.google.cloud.operators.dataform.DataformCreateCompilationResultOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 0 :start-after: [START howto_operator_create_compilation_result] @@ -78,7 +78,7 @@ To get a Compilation Result you can use: :class:`~airflow.providers.google.cloud.operators.dataform.DataformGetCompilationResultOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 4 :start-after: [START howto_operator_get_compilation_result] @@ -95,13 +95,13 @@ We have possibility to run this operation in the sync mode and async, for async a sensor: :class:`~airflow.providers.google.cloud.operators.dataform.DataformWorkflowInvocationStateSensor` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 4 :start-after: [START howto_operator_create_workflow_invocation] :end-before: [END howto_operator_create_workflow_invocation] -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 4 :start-after: [START howto_operator_create_workflow_invocation_async] @@ -114,7 +114,7 @@ To get a Workflow Invocation you can use: :class:`~airflow.providers.google.cloud.operators.dataform.DataformGetWorkflowInvocationOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 4 :start-after: [START howto_operator_get_workflow_invocation] @@ -127,7 +127,7 @@ To query Workflow Invocation Actions you can use: :class:`~airflow.providers.google.cloud.operators.dataform.DataformQueryWorkflowInvocationActionsOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 4 :start-after: [START howto_operator_query_workflow_invocation_actions] @@ -140,7 +140,7 @@ To cancel a Workflow Invocation you can use: :class:`~airflow.providers.google.cloud.sensors.dataform.DataformCancelWorkflowInvocationOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 4 :start-after: [START howto_operator_cancel_workflow_invocation] @@ -152,7 +152,7 @@ Deletes repository. Example of usage can be seen below: :class:`~airflow.providers.google.cloud.operators.dataform.DataformDeleteRepositoryOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 0 :start-after: [START howto_operator_delete_workspace] @@ -164,7 +164,7 @@ Deletes workspace. Example of usage can be seen below: :class:`~airflow.providers.google.cloud.operators.dataform.DataformDeleteRepositoryOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 0 :start-after: [START howto_operator_delete_repository] @@ -176,7 +176,7 @@ Removes file. Example of usage can be seen below: :class:`~airflow.providers.google.cloud.operators.dataform.DataformRemoveFileOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 0 :start-after: [START howto_operator_remove_file] @@ -188,7 +188,7 @@ Removes directory. Example of usage can be seen below: :class:`~airflow.providers.google.cloud.operators.dataform.DataformRemoveDirectoryOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 0 :start-after: [START howto_operator_remove_directory] @@ -200,7 +200,7 @@ Creates default projects structure for provided workspace. Before it can be done :class:`~airflow.providers.google.cloud.utils.dataform.make_initialization_workspace_flow` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 0 :start-after: [START howto_initialize_workspace] @@ -212,7 +212,7 @@ Writes file with given content to specified workspace. :class:`~airflow.providers.google.cloud.operators.dataform.DataformWriteFileOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 4 :start-after: [START howto_operator_write_file] @@ -224,7 +224,7 @@ Make directory with given path in specified workspace. :class:`~airflow.providers.google.cloud.operators.dataform.DataformMakeDirectoryOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 4 :start-after: [START howto_operator_make_directory] @@ -236,7 +236,7 @@ Installs npm packages for specified workspace :class:`~airflow.providers.google.cloud.operators.dataform.DataformInstallNpmPackagesOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataform/example_dataform.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/dataform/example_dataform.py :language: python :dedent: 4 :start-after: [START howto_operator_install_npm_packages] diff --git a/docs/apache-airflow-providers-google/operators/cloud/datafusion.rst b/docs/apache-airflow-providers-google/operators/cloud/datafusion.rst index 0a728cdb10f56..207479d5de96c 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/datafusion.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/datafusion.rst @@ -40,7 +40,7 @@ Restart DataFusion Instance To restart Data Fusion instance use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionRestartInstanceOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_restart_instance_operator] @@ -59,7 +59,7 @@ Delete DataFusion Instance To delete Data Fusion instance use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionDeleteInstanceOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_delete_instance_operator] @@ -79,7 +79,7 @@ Create DataFusion Instance To create Data Fusion instance use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionCreateInstanceOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_create_instance_operator] @@ -99,7 +99,7 @@ Update DataFusion Instance To update Data Fusion instance use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionUpdateInstanceOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_update_instance_operator] @@ -118,7 +118,7 @@ Get DataFusion Instance To retrieve Data Fusion instance use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionGetInstanceOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_get_instance_operator] @@ -138,7 +138,7 @@ Create a DataFusion pipeline To create Data Fusion pipeline use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionCreatePipelineOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. 
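A minimal instance body is enough to sketch instance creation; the ``BASIC`` edition and all names below are illustrative, not taken from the example file:

.. code-block:: python

    from airflow.providers.google.cloud.operators.datafusion import CloudDataFusionCreateInstanceOperator

    create_instance = CloudDataFusionCreateInstanceOperator(
        task_id="create_instance",
        project_id="my-project",  # placeholder
        location="us-central1",
        instance_name="my-fusion-instance",
        instance={"type": "BASIC"},  # minimal instance body
    )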
exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_create_pipeline] @@ -157,7 +157,7 @@ Start a DataFusion pipeline To start Data Fusion pipeline using synchronous mode: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionStartPipelineOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_start_pipeline] @@ -166,7 +166,7 @@ To start Data Fusion pipeline using synchronous mode: To start Data Fusion pipeline using asynchronous mode: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionStartPipelineOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_start_pipeline_async] @@ -179,7 +179,7 @@ It is not possible to use both asynchronous and deferrable parameters at the sam Please, check the example of using deferrable mode: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionStartPipelineOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_start_pipeline_def] @@ -198,7 +198,7 @@ Stop a DataFusion pipeline To stop Data Fusion pipeline use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionStopPipelineOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_stop_pipeline] @@ -217,7 +217,7 @@ Delete a DataFusion pipeline To delete Data Fusion pipeline use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionDeletePipelineOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_delete_pipeline] @@ -237,7 +237,7 @@ List DataFusion pipelines To list Data Fusion pipelines use: :class:`~airflow.providers.google.cloud.operators.datafusion.CloudDataFusionListPipelinesOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_list_pipelines] @@ -255,7 +255,7 @@ When start pipeline is triggered asynchronously sensors may be used to run check :class:`~airflow.providers.google.cloud.sensors.datafusion.CloudDataFusionPipelineStateSensor`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/datafusion/example_datafusion.py +.. 
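Asynchronous start plus the state sensor can be sketched together. Names are placeholders, and the sensor's ``pipeline_id`` assumes the start task returns the pipeline run id:

.. code-block:: python

    from airflow.providers.google.cloud.operators.datafusion import CloudDataFusionStartPipelineOperator
    from airflow.providers.google.cloud.sensors.datafusion import CloudDataFusionPipelineStateSensor

    start_pipeline_async = CloudDataFusionStartPipelineOperator(
        task_id="start_pipeline_async",
        location="us-central1",
        instance_name="my-fusion-instance",  # placeholder
        pipeline_name="my-pipeline",  # placeholder
        asynchronous=True,
    )

    wait_for_pipeline = CloudDataFusionPipelineStateSensor(
        task_id="wait_for_pipeline",
        pipeline_name="my-pipeline",
        pipeline_id=start_pipeline_async.output,  # run id returned by the start task
        expected_statuses=["COMPLETED"],
        instance_name="my-fusion-instance",
        location="us-central1",
    )

    start_pipeline_async >> wait_for_pipeline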
exampleinclude:: /../../providers/tests/system/google/cloud/datafusion/example_datafusion.py :language: python :dedent: 4 :start-after: [START howto_cloud_data_fusion_start_pipeline_sensor] diff --git a/docs/apache-airflow-providers-google/operators/cloud/datapipeline.rst b/docs/apache-airflow-providers-google/operators/cloud/datapipeline.rst index 10186138edb25..1123c48d6d792 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/datapipeline.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/datapipeline.rst @@ -51,7 +51,7 @@ The project id and location will be used to build the parent name needed to crea Here is an example of how you can create a Data Pipelines instance by running the above parameters with CreateDataPipelineOperator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/datapipelines/example_datapipeline.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datapipelines/example_datapipeline.py :language: python :dedent: 4 :start-after: [START howto_operator_create_data_pipeline] @@ -79,7 +79,7 @@ The Project ID and Location will be used to build the parent name, which is wher You can run a Data Pipelines instance by running the above parameters with RunDataPipelineOperator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/datapipelines/example_datapipeline.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datapipelines/example_datapipeline.py :language: python :dedent: 4 :start-after: [START howto_operator_run_data_pipeline] diff --git a/docs/apache-airflow-providers-google/operators/cloud/dataplex.rst b/docs/apache-airflow-providers-google/operators/cloud/dataplex.rst index 1b8cc56dcf51d..788024ce0bbea 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/dataplex.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/dataplex.rst @@ -31,7 +31,7 @@ For more information about the available fields to pass when creating a task, vi A simple task configuration can look as followed: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex.py :language: python :dedent: 0 :start-after: [START howto_dataplex_configuration] @@ -40,13 +40,13 @@ A simple task configuration can look as followed: With this configuration we can create the task both synchronously & asynchronously: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCreateTaskOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex.py :language: python :dedent: 4 :start-after: [START howto_dataplex_create_task_operator] :end-before: [END howto_dataplex_create_task_operator] -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex.py :language: python :dedent: 4 :start-after: [START howto_dataplex_async_create_task_operator] @@ -59,7 +59,7 @@ To delete a task you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexDeleteTaskOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex.py +.. 
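A task body plus the create operator, sketched under the assumption of an on-demand Spark task; the service account, bucket, and all IDs are placeholders:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dataplex import DataplexCreateTaskOperator

    EXAMPLE_TASK_BODY = {
        # "type_" follows the protobuf-generated field name for the trigger type.
        "trigger_spec": {"type_": "ON_DEMAND"},
        "execution_spec": {"service_account": "my-sa@my-project.iam.gserviceaccount.com"},
        "spark": {"python_script_file": "gs://my-bucket/spark_example_pipeline.py"},
    }

    create_dataplex_task = DataplexCreateTaskOperator(
        task_id="create_dataplex_task",
        project_id="my-project",
        region="us-central1",
        lake_id="my-lake",
        body=EXAMPLE_TASK_BODY,
        dataplex_task_id="my-task",
    )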
exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex.py :language: python :dedent: 4 :start-after: [START howto_dataplex_delete_task_operator] @@ -72,7 +72,7 @@ To list tasks you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexListTasksOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex.py :language: python :dedent: 4 :start-after: [START howto_dataplex_list_tasks_operator] @@ -85,7 +85,7 @@ To get a task you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexGetTaskOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex.py :language: python :dedent: 4 :start-after: [START howto_dataplex_get_task_operator] @@ -98,7 +98,7 @@ To wait for a task created asynchronously you can use: :class:`~airflow.providers.google.cloud.sensors.dataplex.DataplexTaskStateSensor` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex.py :language: python :dedent: 4 :start-after: [START howto_dataplex_task_state_sensor] @@ -113,7 +113,7 @@ For more information about the available fields to pass when creating a lake, vi A simple task configuration can look as followed: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex.py :language: python :dedent: 0 :start-after: [START howto_dataplex_lake_configuration] @@ -123,7 +123,7 @@ With this configuration we can create the lake: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCreateLakeOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex.py :language: python :dedent: 4 :start-after: [START howto_dataplex_create_lake_operator] @@ -136,7 +136,7 @@ To delete a lake you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexDeleteLakeOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex.py :language: python :dedent: 4 :start-after: [START howto_dataplex_delete_lake_operator] @@ -150,7 +150,7 @@ For more information about the available fields to pass when creating a Data Qua A simple Data Quality scan configuration can look as followed: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 0 :start-after: [START howto_dataplex_data_quality_configuration] @@ -160,7 +160,7 @@ With this configuration we can create or update the Data Quality scan: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCreateOrUpdateDataQualityScanOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_create_data_quality_operator] @@ -173,7 +173,7 @@ To get a Data Quality scan you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexGetDataQualityScanOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_get_data_quality_operator] @@ -188,7 +188,7 @@ To delete a Data Quality scan you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexDeleteDataQualityScanOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_delete_data_quality_operator] @@ -201,7 +201,7 @@ You can run Dataplex Data Quality scan in asynchronous modes to later check its :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexRunDataQualityScanOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_run_data_quality_operator] @@ -211,7 +211,7 @@ To check that running Dataplex Data Quality scan succeeded you can use: :class:`~airflow.providers.google.cloud.sensors.dataplex.DataplexDataQualityJobStatusSensor`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_data_scan_job_state_sensor] @@ -219,7 +219,7 @@ To check that running Dataplex Data Quality scan succeeded you can use: Also for this action you can use operator in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_run_data_quality_def_operator] @@ -232,7 +232,7 @@ To get a Data Quality scan job you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexGetDataQualityScanResultOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_get_data_quality_job_operator] @@ -240,7 +240,7 @@ To get a Data Quality scan job you can use: Also for this action you can use operator in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_get_data_quality_job_def_operator] @@ -255,7 +255,7 @@ For more information about the available fields to pass when creating a zone, vi A simple zone configuration can look as followed: -.. 
exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 0 :start-after: [START howto_dataplex_zone_configuration] @@ -265,7 +265,7 @@ With this configuration we can create a zone: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCreateZoneOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_create_zone_operator] @@ -278,7 +278,7 @@ To delete a zone you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexDeleteZoneOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_delete_zone_operator] @@ -293,7 +293,7 @@ For more information about the available fields to pass when creating an asset, v A simple asset configuration can look as follows: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 0 :start-after: [START howto_dataplex_asset_configuration] @@ -303,7 +303,7 @@ With this configuration we can create the asset: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCreateAssetOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_create_asset_operator] @@ -316,7 +316,7 @@ To delete an asset you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexDeleteAssetOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py :language: python :dedent: 4 :start-after: [START howto_dataplex_delete_asset_operator] @@ -330,7 +330,7 @@ For more information about the available fields to pass when creating a Data Pro A simple Data Profile scan configuration can look as follows: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py :language: python :dedent: 0 :start-after: [START howto_dataplex_data_profile_configuration] @@ -340,7 +340,7 @@ With this configuration we can create or update the Data Profile scan: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexCreateOrUpdateDataProfileScanOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py :language: python :dedent: 4 :start-after: [START howto_dataplex_create_data_profile_operator] @@ -353,7 +353,7 @@ To get a Data Profile scan you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexGetDataProfileScanOperator` -..
exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py :language: python :dedent: 4 :start-after: [START howto_dataplex_get_data_profile_operator] @@ -368,7 +368,7 @@ To delete a Data Profile scan you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexDeleteDataProfileScanOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py :language: python :dedent: 4 :start-after: [START howto_dataplex_delete_data_profile_operator] @@ -381,7 +381,7 @@ You can run Dataplex Data Profile scan in asynchronous modes to later check its :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexRunDataProfileScanOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py :language: python :dedent: 4 :start-after: [START howto_dataplex_run_data_profile_operator] @@ -391,7 +391,7 @@ To check that running Dataplex Data Profile scan succeeded you can use: :class:`~airflow.providers.google.cloud.sensors.dataplex.DataplexDataProfileJobStatusSensor`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py :language: python :dedent: 4 :start-after: [START howto_dataplex_data_scan_job_state_sensor] @@ -399,7 +399,7 @@ To check that running Dataplex Data Profile scan succeeded you can use: Also for this action you can use operator in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py :language: python :dedent: 4 :start-after: [START howto_dataplex_run_data_profile_def_operator] @@ -412,7 +412,7 @@ To get a Data Profile scan job you can use: :class:`~airflow.providers.google.cloud.operators.dataplex.DataplexGetDataProfileScanResultOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py :language: python :dedent: 4 :start-after: [START howto_dataplex_get_data_profile_job_operator] diff --git a/docs/apache-airflow-providers-google/operators/cloud/dataprep.rst b/docs/apache-airflow-providers-google/operators/cloud/dataprep.rst index 661443f26470b..8e7ad129fa0aa 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/dataprep.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/dataprep.rst @@ -59,7 +59,7 @@ To get information about jobs within a Cloud Dataprep job use: Example usage: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataprep/example_dataprep.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataprep/example_dataprep.py :language: python :dedent: 4 :start-after: [START how_to_dataprep_run_job_group_operator] @@ -77,7 +77,7 @@ To get information about jobs within a Cloud Dataprep job use: Example usage: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataprep/example_dataprep.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/dataprep/example_dataprep.py :language: python :dedent: 4 :start-after: [START how_to_dataprep_get_jobs_for_job_group_operator] @@ -96,7 +96,7 @@ To get information about jobs within a Cloud Dataprep job use: Example usage: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataprep/example_dataprep.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataprep/example_dataprep.py :language: python :dedent: 4 :start-after: [START how_to_dataprep_get_job_group_operator] @@ -112,7 +112,7 @@ To get information about jobs within a Cloud Dataprep job use: Example usage: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataprep/example_dataprep.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataprep/example_dataprep.py :language: python :dedent: 4 :start-after: [START how_to_dataprep_copy_flow_operator] @@ -130,7 +130,7 @@ To get information about jobs within a Cloud Dataprep job use: Example usage: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataprep/example_dataprep.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataprep/example_dataprep.py :language: python :dedent: 4 :start-after: [START how_to_dataprep_dataprep_run_flow_operator] @@ -148,7 +148,7 @@ To get information about jobs within a Cloud Dataprep job use: Example usage: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataprep/example_dataprep.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataprep/example_dataprep.py :language: python :dedent: 4 :start-after: [START how_to_dataprep_delete_flow_operator] @@ -167,7 +167,7 @@ To get information about jobs within a Cloud Dataprep job use: Example usage: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataprep/example_dataprep.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataprep/example_dataprep.py :language: python :dedent: 4 :start-after: [START how_to_dataprep_job_group_finished_sensor] diff --git a/docs/apache-airflow-providers-google/operators/cloud/dataproc.rst b/docs/apache-airflow-providers-google/operators/cloud/dataproc.rst index 1f7bac8566caf..f8489dd6795e3 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/dataproc.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/dataproc.rst @@ -56,7 +56,7 @@ For more information about the available fields to pass when creating a cluster, A cluster configuration can look as followed: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_hive.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_create_cluster] @@ -65,7 +65,7 @@ A cluster configuration can look as followed: With this configuration we can create the cluster: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocCreateClusterOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_hive.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_create_cluster_operator] @@ -84,7 +84,7 @@ This can be beneficial for running Dataproc workloads on GKE while optimizing co To create Dataproc cluster in Google Kubernetes Engine you could pass cluster configuration: -.. 
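A small, fully spelled-out cluster config plus the create operator; machine types and sizes below are modest placeholder choices, not the example file's values:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dataproc import DataprocCreateClusterOperator

    CLUSTER_CONFIG = {
        "master_config": {
            "num_instances": 1,
            "machine_type_uri": "n1-standard-4",
            "disk_config": {"boot_disk_type": "pd-standard", "boot_disk_size_gb": 32},
        },
        "worker_config": {
            "num_instances": 2,
            "machine_type_uri": "n1-standard-4",
            "disk_config": {"boot_disk_type": "pd-standard", "boot_disk_size_gb": 32},
        },
    }

    create_cluster = DataprocCreateClusterOperator(
        task_id="create_cluster",
        project_id="my-project",  # placeholder
        cluster_config=CLUSTER_CONFIG,
        region="us-central1",
        cluster_name="my-cluster",
    )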
exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_gke.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_create_cluster_in_gke_config] @@ -93,7 +93,7 @@ To create Dataproc cluster in Google Kubernetes Engine you could pass cluster co With this configuration we can create the cluster: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocCreateClusterOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_gke.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_create_cluster_operator_in_gke] @@ -105,7 +105,7 @@ Note that default image might not support the chosen optional component. If this is your case, please specify correct ``image_version`` that you can find in the `documentation. `__ -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_presto.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_presto.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_create_cluster] @@ -118,7 +118,7 @@ If this is your case, please specify correct ``image_version`` that you can find `documentation. `__ -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_trino.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_trino.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_create_cluster] @@ -126,7 +126,7 @@ If this is your case, please specify correct ``image_version`` that you can find You can use deferrable mode for this action in order to run the operator asynchronously: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_deferrable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_create_cluster_operator_async] @@ -139,7 +139,7 @@ this could be easily done using **make()** of :class:`~airflow.providers.google.cloud.operators.dataproc.ClusterGenerator` You can generate and use config as followed: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_generator.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_generator.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_create_cluster_generate_cluster_config] @@ -156,7 +156,7 @@ For more information about the available fields to pass when diagnosing a cluste To diagnose a Dataproc cluster use: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocDiagnoseClusterOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_diagnose.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_diagnose.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_diagnose_cluster] @@ -164,7 +164,7 @@ To diagnose a Dataproc cluster use: You can also use deferrable mode in order to run the operator asynchronously: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_diagnose.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_diagnose.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_diagnose_cluster_deferrable] @@ -178,7 +178,7 @@ For more information on updateMask and other parameters take a look at `Dataproc An example of a new cluster config and the updateMask: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_update.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_update.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_updatemask_cluster_operator] @@ -187,7 +187,7 @@ An example of a new cluster config and the updateMask: To update a cluster you can use: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocUpdateClusterOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_update.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_update.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_update_cluster_operator] @@ -195,7 +195,7 @@ To update a cluster you can use: You can use deferrable mode for this action in order to run the operator asynchronously: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_deferrable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_update_cluster_operator_async] @@ -207,7 +207,7 @@ Starting a cluster To start a cluster you can use the :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocStartClusterOperator`: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_start_stop.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_start_stop.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_start_cluster_operator] @@ -219,7 +219,7 @@ Stopping a cluster To stop a cluster you can use the :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocStopClusterOperator`: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_start_stop.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_start_stop.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_stop_cluster_operator] @@ -231,7 +231,7 @@ Deleting a cluster To delete a cluster you can use: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocDeleteClusterOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_hive.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_delete_cluster_operator] @@ -239,7 +239,7 @@ To delete a cluster you can use: You can use deferrable mode for this action in order to run the operator asynchronously: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_deferrable.py +.. 
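Stopping and later restarting a cluster only needs the cluster coordinates; a sketch with placeholder names:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dataproc import (
        DataprocStartClusterOperator,
        DataprocStopClusterOperator,
    )

    stop_cluster = DataprocStopClusterOperator(
        task_id="stop_cluster",
        project_id="my-project",  # placeholder
        region="us-central1",
        cluster_name="my-cluster",
    )

    start_cluster = DataprocStartClusterOperator(
        task_id="start_cluster",
        project_id="my-project",
        region="us-central1",
        cluster_name="my-cluster",
    )

    stop_cluster >> start_cluster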
exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_delete_cluster_operator_async] @@ -258,7 +258,7 @@ file system. You can specify a file:/// path to refer to a local file on a clust The job configuration can be submitted by using: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocSubmitJobOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_pyspark.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_pyspark.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_submit_job_to_cluster_operator] @@ -273,7 +273,7 @@ There are more arguments to provide in the jobs than the examples show. For the Example of the configuration for a PySpark Job: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_pyspark.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_pyspark.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_pyspark_config] @@ -281,7 +281,7 @@ Example of the configuration for a PySpark Job: Example of the configuration for a SparkSQl Job: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_spark_sql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_spark_sql.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_sparksql_config] @@ -289,7 +289,7 @@ Example of the configuration for a SparkSQl Job: Example of the configuration for a Spark Job: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_spark.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_spark.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_spark_config] @@ -297,7 +297,7 @@ Example of the configuration for a Spark Job: Example of the configuration for a Spark Job running in `deferrable mode `__: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_spark_deferrable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_spark_deferrable.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_spark_deferrable_config] @@ -305,7 +305,7 @@ Example of the configuration for a Spark Job running in `deferrable mode `__: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_create_cluster_for_persistent_history_server] @@ -434,7 +434,7 @@ with specific parameters. Documentation how create cluster you can find After Cluster was created you should add it to the Batch configuration. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_create_batch_operator_with_persistent_history_server] @@ -443,7 +443,7 @@ After Cluster was created you should add it to the Batch configuration. 
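The job dictionary plus the submit operator, in one sketch; the GCS path, cluster, and project names are placeholders:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dataproc import DataprocSubmitJobOperator

    PYSPARK_JOB = {
        "reference": {"project_id": "my-project"},
        "placement": {"cluster_name": "my-cluster"},
        "pyspark_job": {"main_python_file_uri": "gs://my-bucket/hello_world.py"},
    }

    submit_pyspark = DataprocSubmitJobOperator(
        task_id="submit_pyspark",
        job=PYSPARK_JOB,
        region="us-central1",
        project_id="my-project",
    )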
To check if the operation succeeded you can use :class:`~airflow.providers.google.cloud.sensors.dataproc.DataprocBatchSensor`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_batch.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_batch_async_sensor] @@ -451,7 +451,7 @@ Also for all these actions you can use the operator in deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch_deferrable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_batch_deferrable.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_create_batch_operator_async] @@ -463,7 +463,7 @@ Get a Batch To get a batch you can use: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocGetBatchOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_batch.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_get_batch_operator] @@ -475,7 +475,7 @@ List a Batch To get a list of existing batches you can use: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocListBatchesOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_batch.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_list_batches_operator] @@ -487,7 +487,7 @@ Delete a Batch To delete a batch you can use: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocDeleteBatchOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_batch.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_delete_batch_operator] @@ -499,7 +499,7 @@ Cancel a Batch Operation To cancel an operation you can use: :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocCancelOperationOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc/example_dataproc_batch.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_cancel_operation_operator] diff --git a/docs/apache-airflow-providers-google/operators/cloud/dataproc_metastore.rst b/docs/apache-airflow-providers-google/operators/cloud/dataproc_metastore.rst index 6bf5bc9cf5408..7f04f9db81a16 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/dataproc_metastore.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/dataproc_metastore.rst @@ -33,7 +33,7 @@ For more information about the available fields to pass when creating a service, A simple service configuration can look as follows: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py +..
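The get/list/delete trio shares the same coordinates; a compact sketch with placeholder values:

.. code-block:: python

    from airflow.providers.google.cloud.operators.dataproc import (
        DataprocDeleteBatchOperator,
        DataprocGetBatchOperator,
        DataprocListBatchesOperator,
    )

    get_batch = DataprocGetBatchOperator(
        task_id="get_batch", batch_id="my-batch", region="us-central1", project_id="my-project"
    )

    list_batches = DataprocListBatchesOperator(
        task_id="list_batches", region="us-central1", project_id="my-project"
    )

    delete_batch = DataprocDeleteBatchOperator(
        task_id="delete_batch", batch_id="my-batch", region="us-central1", project_id="my-project"
    )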
exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_metastore_create_service] @@ -42,7 +42,7 @@ A simple service configuration can look as followed: With this configuration we can create the service: :class:`~airflow.providers.google.cloud.operators.dataproc_metastore.DataprocMetastoreCreateServiceOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_create_service_operator] @@ -55,7 +55,7 @@ To get a service you can use: :class:`~airflow.providers.google.cloud.operators.dataproc_metastore.DataprocMetastoreGetServiceOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_get_service_operator] @@ -69,7 +69,7 @@ For more information on updateMask and other parameters take a look at `Dataproc An example of a new service config and the updateMask: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_metastore_update_service] @@ -78,7 +78,7 @@ An example of a new service config and the updateMask: To update a service you can use: :class:`~airflow.providers.google.cloud.operators.dataproc_metastore.DataprocMetastoreUpdateServiceOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_update_service_operator] @@ -91,7 +91,7 @@ To delete a service you can use: :class:`~airflow.providers.google.cloud.operators.dataproc_metastore.DataprocMetastoreDeleteServiceOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_delete_service_operator] @@ -104,7 +104,7 @@ To export metadata you can use: :class:`~airflow.providers.google.cloud.operators.dataproc_metastore.DataprocMetastoreExportMetadataOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_export_metadata_operator] @@ -117,7 +117,7 @@ To restore a service you can use: :class:`~airflow.providers.google.cloud.operators.dataproc_metastore.DataprocMetastoreRestoreServiceOperator` -.. 
exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_restore_service_operator] @@ -131,7 +131,7 @@ For more information about the available fields to pass when creating a metadata A simple metadata import configuration can look as followed: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_metastore_create_metadata_import] @@ -140,7 +140,7 @@ A simple metadata import configuration can look as followed: To create a metadata import you can use: :class:`~airflow.providers.google.cloud.operators.dataproc_metastore.DataprocMetastoreCreateMetadataImportOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_create_metadata_import_operator] @@ -154,7 +154,7 @@ For more information about the available fields to pass when creating a backup, A simple backup configuration can look as followed: -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py :language: python :dedent: 0 :start-after: [START how_to_cloud_dataproc_metastore_create_backup] @@ -163,7 +163,7 @@ A simple backup configuration can look as followed: With this configuration we can create the backup: :class:`~airflow.providers.google.cloud.operators.dataproc_metastore.DataprocMetastoreCreateBackupOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_create_backup_operator] @@ -176,7 +176,7 @@ To delete a backup you can use: :class:`~airflow.providers.google.cloud.operators.dataproc_metastore.DataprocMetastoreDeleteBackupOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_delete_backup_operator] @@ -189,7 +189,7 @@ To list backups you can use: :class:`~airflow.providers.google.cloud.operators.dataproc_metastore.DataprocMetastoreListBackupsOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_list_backups_operator] @@ -201,7 +201,7 @@ Check Hive partitions existence To check that Hive partitions have been created in the Metastore for a given table you can use: :class:`~airflow.providers.google.cloud.sensors.dataproc_metastore.MetastoreHivePartitionSensor` -.. exampleinclude:: /../../tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py :language: python :dedent: 4 :start-after: [START how_to_cloud_dataproc_metastore_hive_partition_sensor] diff --git a/docs/apache-airflow-providers-google/operators/cloud/datastore.rst b/docs/apache-airflow-providers-google/operators/cloud/datastore.rst index 8218e526a192c..05b441543fbc1 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/datastore.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/datastore.rst @@ -38,7 +38,7 @@ Export Entities To export entities from Google Cloud Datastore to Cloud Storage use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreExportEntitiesOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 4 :start-after: [START how_to_export_task] @@ -52,7 +52,7 @@ Import Entities To import entities from Cloud Storage to Google Cloud Datastore use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreImportEntitiesOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 4 :start-after: [START how_to_import_task] @@ -66,7 +66,7 @@ Allocate Ids To allocate IDs for incomplete keys use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreAllocateIdsOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 4 :start-after: [START how_to_allocate_ids] @@ -74,7 +74,7 @@ To allocate IDs for incomplete keys use An example of a partial keys required by the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 0 :start-after: [START how_to_keys_def] @@ -88,7 +88,7 @@ Begin transaction To begin a new transaction use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreBeginTransactionOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 4 :start-after: [START how_to_begin_transaction] @@ -96,7 +96,7 @@ To begin a new transaction use An example of a transaction options required by the operator: -.. 
exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 0 :start-after: [START how_to_transaction_def] @@ -110,7 +110,7 @@ Commit transaction To commit a transaction, optionally creating, deleting or modifying some entities use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreCommitOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 4 :start-after: [START how_to_commit_task] @@ -118,7 +118,7 @@ use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreCo An example of a commit information required by the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 0 :start-after: [START how_to_commit_def] @@ -132,7 +132,7 @@ Run query To run a query for entities use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreRunQueryOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_query.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_query.py :language: python :dedent: 4 :start-after: [START how_to_run_query] @@ -140,7 +140,7 @@ To run a query for entities use An example of a query required by the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_query.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_query.py :language: python :dedent: 0 :start-after: [START how_to_query_def] @@ -154,7 +154,7 @@ Roll back transaction To roll back a transaction use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreRollbackOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_rollback.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_rollback.py :language: python :dedent: 4 :start-after: [START how_to_rollback_transaction] @@ -168,7 +168,7 @@ Get operation state To get the current state of a long-running operation use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreGetOperationOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 4 :start-after: [START get_operation_state] @@ -182,7 +182,7 @@ Delete operation To delete an operation use :class:`~airflow.providers.google.cloud.operators.datastore.CloudDatastoreDeleteOperationOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/datastore/example_datastore_commit.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/datastore/example_datastore_commit.py :language: python :dedent: 4 :start-after: [START delete_operation] diff --git a/docs/apache-airflow-providers-google/operators/cloud/functions.rst b/docs/apache-airflow-providers-google/operators/cloud/functions.rst index d8ac6bbdd6d0c..3124e8e356f74 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/functions.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/functions.rst @@ -38,7 +38,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_functions/example_functions.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_functions/example_functions.py :language: python :dedent: 4 :start-after: [START howto_operator_gcf_delete] @@ -47,7 +47,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/functions.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/functions.py :language: python :dedent: 4 :start-after: [START gcf_function_delete_template_fields] @@ -77,7 +77,7 @@ Arguments When a DAG is created, the default_args dictionary can be used to pass arguments common with other tasks: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_functions/example_functions.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_functions/example_functions.py :language: python :start-after: [START howto_operator_gcf_default_args] :end-before: [END howto_operator_gcf_default_args] @@ -101,19 +101,19 @@ Using the operator Depending on the combination of parameters, the Function's source code can be obtained from different sources: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_functions/example_functions.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_functions/example_functions.py :language: python :start-after: [START howto_operator_gcf_deploy_body] :end-before: [END howto_operator_gcf_deploy_body] -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_functions/example_functions.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_functions/example_functions.py :language: python :start-after: [START howto_operator_gcf_deploy_variants] :end-before: [END howto_operator_gcf_deploy_variants] The code to create the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_functions/example_functions.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_functions/example_functions.py :language: python :dedent: 4 :start-after: [START howto_operator_gcf_deploy] @@ -122,7 +122,7 @@ The code to create the operator: You can also create the operator without project id - project id will be retrieved from the Google Cloud connection used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/cloud_functions/example_functions.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/cloud_functions/example_functions.py :language: python :dedent: 4 :start-after: [START howto_operator_gcf_deploy_no_project_id] @@ -131,7 +131,7 @@ from the Google Cloud connection used: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/functions.py +.. 
literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/functions.py :language: python :dedent: 4 :start-after: [START gcf_function_deploy_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/gcs.rst b/docs/apache-airflow-providers-google/operators/cloud/gcs.rst index c4be59533a0a6..82d38fda87b80 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/gcs.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/gcs.rst @@ -46,7 +46,7 @@ The time span is defined by the time span's start and end timestamps. If a DAG does not have a *next* DAG instance scheduled, the time span end infinite, meaning the operator processes all files older than ``data_interval_start``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_transform_timespan.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_transform_timespan.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_timespan_file_transform_operator_Task] @@ -66,7 +66,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_acl.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_acl.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_bucket_create_acl_entry_task] @@ -75,7 +75,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/gcs.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/gcs.py :language: python :dedent: 4 :start-after: [START gcs_bucket_create_acl_template_fields] @@ -100,7 +100,7 @@ For parameter definition, take a look at Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_acl.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_acl.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_object_create_acl_entry_task] @@ -109,7 +109,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/gcs.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/gcs.py :language: python :dedent: 4 :start-after: [START gcs_object_create_acl_template_fields] @@ -131,7 +131,7 @@ Deleting Bucket allows you to remove bucket object from the Google Cloud Storage It is performed through the :class:`~airflow.providers.google.cloud.operators.gcs.GCSDeleteBucketOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_upload_download.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_upload_download.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_delete_bucket] @@ -160,7 +160,7 @@ GCSObjectExistenceSensor Use the :class:`~airflow.providers.google.cloud.sensors.gcs.GCSObjectExistenceSensor` to wait (poll) for the existence of a file in Google Cloud Storage. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_object_exists_task] @@ -168,7 +168,7 @@ Use the :class:`~airflow.providers.google.cloud.sensors.gcs.GCSObjectExistenceSe Also you can use deferrable mode in this operator if you would like to free up the worker slots while the sensor is running. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_object_exists_task_defered] @@ -181,7 +181,7 @@ GCSObjectExistenceAsyncSensor :class:`~airflow.providers.google.cloud.sensors.gcs.GCSObjectExistenceAsyncSensor` is deprecated and will be removed in a future release. Please use :class:`~airflow.providers.google.cloud.sensors.gcs.GCSObjectExistenceSensor` and use the deferrable mode in that operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_object_exists_task_async] @@ -195,7 +195,7 @@ GCSObjectsWithPrefixExistenceSensor Use the :class:`~airflow.providers.google.cloud.sensors.gcs.GCSObjectsWithPrefixExistenceSensor` to wait (poll) for the existence of a file with a specified prefix in Google Cloud Storage. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_object_with_prefix_exists_task] @@ -205,7 +205,7 @@ You can set the ``deferrable`` param to True if you want this sensor to run asyn efficient utilization of resources in your Airflow deployment. However the triggerer component needs to be enabled for this functionality to work. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_object_with_prefix_exists_task_async] @@ -220,7 +220,7 @@ GCSUploadSessionCompleteSensor Use the :class:`~airflow.providers.google.cloud.sensors.gcs.GCSUploadSessionCompleteSensor` to check for a change in the number of files with a specified prefix in Google Cloud Storage. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_gcs_upload_session_complete_task] @@ -229,7 +229,7 @@ Use the :class:`~airflow.providers.google.cloud.sensors.gcs.GCSUploadSessionComp You can set the parameter ``deferrable`` to True if you want the worker slots to be freed up while sensor is running. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_gcs_upload_session_async_task] @@ -242,7 +242,7 @@ GCSObjectUpdateSensor Use the :class:`~airflow.providers.google.cloud.sensors.gcs.GCSObjectUpdateSensor` to check if an object is updated in Google Cloud Storage. -.. 
exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_object_update_exists_task] @@ -252,7 +252,7 @@ You can set the ``deferrable`` param to True if you want this sensor to run asyn utilization of resources in your Airflow deployment. However the triggerer component needs to be enabled for this functionality to work. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_sensor.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_object_update_exists_task_async] diff --git a/docs/apache-airflow-providers-google/operators/cloud/index.rst b/docs/apache-airflow-providers-google/operators/cloud/index.rst index f974c0eb81a4f..3daed7a11e9bf 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/index.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/index.rst @@ -29,4 +29,4 @@ Google Cloud Operators .. note:: You can learn how to use Google Cloud integrations by analyzing the - `source code `_ of the particular example DAGs. + `source code `_ of the particular example DAGs. diff --git a/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst b/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst index d71ebf87e929f..9eaf268824975 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst @@ -43,7 +43,7 @@ Create GKE cluster Here is an example of a cluster definition: -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py :language: python :start-after: [START howto_operator_gcp_gke_create_cluster_definition] :end-before: [END howto_operator_gcp_gke_create_cluster_definition] @@ -53,7 +53,7 @@ A dict object like this, or a definition, is required when creating a cluster with :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKECreateClusterOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_create_cluster] @@ -64,7 +64,7 @@ possibility to free up the worker when it knows it has to wait, and hand off the As a result, while it is suspended (deferred), it is not taking up a worker slot and your cluster will have a lot less resources wasted on idle Operators or Sensors: -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_create_cluster_async] @@ -84,7 +84,7 @@ To install and use Kueue on your cluster with the help of :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKEStartKueueInsideClusterOperator` as shown in this example: -.. 
exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py :language: python :start-after: [START howto_operator_gke_install_kueue] :end-before: [END howto_operator_gke_install_kueue] @@ -99,7 +99,7 @@ To delete a cluster, use :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKEDeleteClusterOperator`. This would also delete all the nodes allocated to the cluster. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_delete_cluster] @@ -110,7 +110,7 @@ possibility to free up the worker when it knows it has to wait, and hand off the As a result, while it is suspended (deferred), it is not taking up a worker slot and your cluster will have a lot less resources wasted on idle Operators or Sensors: -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_delete_cluster_async] @@ -167,7 +167,7 @@ is the path ``/airflow/xcom``. To provide values to the XCom, ensure your Pod wr ``return.json`` in the sidecar. The contents of this can then be used downstream in your DAG. Here is an example of it being used: -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_start_pod_xcom] @@ -175,7 +175,7 @@ Here is an example of it being used: And then use it in other operators: -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_xcom_result] @@ -186,7 +186,7 @@ possibility to free up the worker when it knows it has to wait, and hand off the As a result, while it is suspended (deferred), it is not taking up a worker slot and your cluster will have a lot less resources wasted on idle Operators or Sensors: -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_start_pod_xcom_async] @@ -207,7 +207,7 @@ There are two operators available in order to run a job on a GKE cluster: There is no need to manage the ``kube_config`` file, as it will be generated automatically. All Kubernetes parameters (except ``config_file``) are also valid for the ``GKEStartJobOperator``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_start_job] @@ -216,7 +216,7 @@ All Kubernetes parameters (except ``config_file``) are also valid for the ``GKES ``GKEStartJobOperator`` also supports deferrable mode. Note that it makes sense only if the ``wait_until_job_complete`` parameter is set ``True``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_start_job_def] @@ -224,7 +224,7 @@ parameter is set ``True``. For run Job on a GKE cluster with Kueue enabled use ``GKEStartKueueJobOperator``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py :language: python :dedent: 4 :start-after: [START howto_operator_kueue_start_job] @@ -245,7 +245,7 @@ There are two operators available in order to delete a job on a GKE cluster: There is no need to manage the ``kube_config`` file, as it will be generated automatically. All Kubernetes parameters (except ``config_file``) are also valid for the ``GKEDeleteJobOperator``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_delete_job] @@ -260,7 +260,7 @@ Retrieve information about Job by given name You can use :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKEDescribeJobOperator` to retrieve detailed description of existing Job by providing its name and namespace. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_describe_job] @@ -276,7 +276,7 @@ You can use :class:`~airflow.providers.google.cloud.operators.kubernetes_engine. list of existing Jobs. If ``namespace`` parameter is provided, output will include Jobs across given namespace. If ``namespace`` parameter is not specified, the information across all the namespaces will be outputted. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_list_jobs] @@ -291,7 +291,7 @@ Create a resource in a GKE cluster You can use :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKECreateCustomResourceOperator` to create resource in the specified Google Kubernetes Engine cluster. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_create_resource] @@ -306,7 +306,7 @@ Delete a resource in a GKE cluster You can use :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKEDeleteCustomResourceOperator` to delete resource in the specified Google Kubernetes Engine cluster. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_delete_resource] @@ -321,7 +321,7 @@ Suspend a Job on a GKE cluster You can use :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKESuspendJobOperator` to suspend Job in the specified Google Kubernetes Engine cluster. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_suspend_job] @@ -336,7 +336,7 @@ Resume a Job on a GKE cluster You can use :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKEResumeJobOperator` to resume Job in the specified Google Kubernetes Engine cluster. -.. exampleinclude:: /../../tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py :language: python :dedent: 4 :start-after: [START howto_operator_gke_resume_job] diff --git a/docs/apache-airflow-providers-google/operators/cloud/life_sciences.rst b/docs/apache-airflow-providers-google/operators/cloud/life_sciences.rst index 49f9b6c4ea9c2..6e7676e910508 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/life_sciences.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/life_sciences.rst @@ -34,7 +34,7 @@ Pipeline Configuration In order to run the pipeline, it is necessary to configure the request body. Here is an example of the pipeline configuration with a single action. -.. exampleinclude:: /../../tests/system/providers/google/cloud/life_sciences/example_life_sciences.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/life_sciences/example_life_sciences.py :language: python :dedent: 0 :start-after: [START howto_configure_simple_action_pipeline] @@ -42,7 +42,7 @@ Here is an example of the pipeline configuration with a single action. The pipeline can also be configured with multiple action. -.. exampleinclude:: /../../tests/system/providers/google/cloud/life_sciences/example_life_sciences.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/life_sciences/example_life_sciences.py :language: python :dedent: 0 :start-after: [START howto_configure_multiple_action_pipeline] @@ -59,7 +59,7 @@ Use the :class:`~airflow.providers.google.cloud.operators.life_sciences.LifeSciencesRunPipelineOperator` to execute pipelines. -.. exampleinclude:: /../../tests/system/providers/google/cloud/life_sciences/example_life_sciences.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/life_sciences/example_life_sciences.py :language: python :dedent: 0 :start-after: [START howto_run_pipeline] diff --git a/docs/apache-airflow-providers-google/operators/cloud/looker.rst b/docs/apache-airflow-providers-google/operators/cloud/looker.rst index f89d23c1c4c29..c8d3c2ad069ac 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/looker.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/looker.rst @@ -49,7 +49,7 @@ To submit a PDT materialization job to Looker you need to provide a model and vi The job configuration can be submitted in synchronous (blocking) mode by using: :class:`~airflow.providers.google.cloud.operators.looker.LookerStartPdtBuildOperator`. -.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_looker.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/cloud/example_dags/example_looker.py :language: python :dedent: 4 :start-after: [START how_to_cloud_looker_start_pdt_build_operator] @@ -60,7 +60,7 @@ Alternatively, the job configuration can be submitted in asynchronous mode by us :class:`~airflow.providers.google.cloud.operators.looker.LookerStartPdtBuildOperator` and :class:`~airflow.providers.google.cloud.sensors.looker.LookerCheckPdtBuildSensor`. -.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_looker.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/cloud/example_dags/example_looker.py :language: python :dedent: 4 :start-after: [START cloud_looker_async_start_pdt_sensor] diff --git a/docs/apache-airflow-providers-google/operators/cloud/mlengine.rst b/docs/apache-airflow-providers-google/operators/cloud/mlengine.rst index f64705e1c267c..0848e1a28741a 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/mlengine.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/mlengine.rst @@ -49,7 +49,7 @@ This operator is deprecated. Please, use :class:`~airflow.providers.google.cloud.operators.vertex_ai.custom_job.CreateCustomPythonPackageTrainingJobOperator` instead. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_create_custom_python_training_job_v1] @@ -69,7 +69,7 @@ of any types. For example, you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.custom_job.CreateCustomPythonPackageTrainingJobOperator`. The result of running this operator will be ready-to-use model saved in Model Registry. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_create_custom_python_training_job_v1] @@ -87,7 +87,7 @@ This operator is deprecated. Please, use :class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.GetModelOperator` instead. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_get_model] @@ -98,7 +98,7 @@ fields to dynamically determine their values. The result are saved to :ref:`XCom allowing them to be used by other operators. 
In this case, the :class:`~airflow.providers.standard.operators.bash.BashOperator` is used to print the model information. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_print_model] @@ -120,7 +120,7 @@ instead. In this case, the new version of specific model could be created by spe ``parent_model`` parameter when running Training Job. This will ensure that new version of model will be trained except of creating new model. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_create_custom_python_training_job_v1] @@ -129,7 +129,7 @@ of creating new model. The :class:`~airflow.providers.google.cloud.operators.vertex_ai.custom_job.CreateCustomPythonPackageTrainingJobOperator` can also be used to create more versions with varying parameters. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_create_custom_python_training_job_v2] @@ -151,7 +151,7 @@ in format ``projects/{project}/locations/{location}/models/{model_id}@{version_i ``projects/{project}/locations/{location}/models/{model_id}@{version_alias}``. By default, the first model version created will be marked as default. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_default_version] @@ -166,7 +166,7 @@ This operator is deprecated. Please, use instead. You can pass the name of the desired model in ``model_id`` parameter. If the model ID is passed with version aliases, the operator will output all the versions available for this model. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_list_versions] @@ -185,7 +185,7 @@ This operator is deprecated. Please, use :class:`~airflow.providers.google.cloud.operators.vertex_ai.batch_prediction_job.CreateBatchPredictionJobOperator` instead. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_start_batch_prediction] @@ -204,7 +204,7 @@ This operator is deprecated. Please, use :class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.DeleteModelVersionOperator` instead. The default version could not be deleted on the model. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_delete_version] @@ -218,7 +218,7 @@ This operator is deprecated. 
Please, use :class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.DeleteModelOperator` instead. -.. exampleinclude:: /../../tests/system/providers/google/cloud/ml_engine/example_mlengine.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/ml_engine/example_mlengine.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_mlengine_delete_model] diff --git a/docs/apache-airflow-providers-google/operators/cloud/natural_language.rst b/docs/apache-airflow-providers-google/operators/cloud/natural_language.rst index 7b54e812c2dd5..0ba46ccf76b03 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/natural_language.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/natural_language.rst @@ -44,14 +44,14 @@ representing text. Here is an example of document with text provided as a string: -.. exampleinclude:: /../../tests/system/providers/google/cloud/natural_language/example_natural_language.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/natural_language/example_natural_language.py :language: python :start-after: [START howto_operator_gcp_natural_language_document_text] :end-before: [END howto_operator_gcp_natural_language_document_text] In addition to supplying string, a document can refer to content stored in Google Cloud Storage. -.. exampleinclude:: /../../tests/system/providers/google/cloud/natural_language/example_natural_language.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/natural_language/example_natural_language.py :language: python :start-after: [START howto_operator_gcp_natural_language_document_gcs] :end-before: [END howto_operator_gcp_natural_language_document_gcs] @@ -66,7 +66,7 @@ public figures, landmarks, etc.), and returns information about those entities. Entity analysis is performed with the :class:`~airflow.providers.google.cloud.operators.natural_language.CloudNaturalLanguageAnalyzeEntitiesOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/natural_language/example_natural_language.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/natural_language/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_entities] @@ -77,7 +77,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/cloud/natural_language/example_natural_language.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/natural_language/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_entities_result] @@ -94,7 +94,7 @@ as positive, negative, or neutral. Sentiment analysis is performed through the :class:`~airflow.providers.google.cloud.operators.natural_language.CloudNaturalLanguageAnalyzeEntitySentimentOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/natural_language/example_natural_language.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/natural_language/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_entity_sentiment] @@ -105,7 +105,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values. 
The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/cloud/natural_language/example_natural_language.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/natural_language/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_entity_sentiment_result] @@ -123,7 +123,7 @@ through the :class:`~airflow.providers.google.cloud.operators.natural_language.CloudNaturalLanguageAnalyzeSentimentOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/natural_language/example_natural_language.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/natural_language/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_sentiment] @@ -134,7 +134,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/cloud/natural_language/example_natural_language.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/natural_language/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_sentiment_result] @@ -151,7 +151,7 @@ content in a document, use the :class:`~airflow.providers.google.cloud.operators.natural_language.CloudNaturalLanguageClassifyTextOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/natural_language/example_natural_language.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/natural_language/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_classify_text] @@ -162,7 +162,7 @@ You can use :ref:`Jinja templating ` with parameters which allows you to dynamically determine values. The result is saved to :ref:`XCom `, which allows it to be used by other operators. -.. exampleinclude:: /../../tests/system/providers/google/cloud/natural_language/example_natural_language.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/natural_language/example_natural_language.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_natural_language_analyze_classify_text_result] diff --git a/docs/apache-airflow-providers-google/operators/cloud/pubsub.rst b/docs/apache-airflow-providers-google/operators/cloud/pubsub.rst index 8fb497a14f01e..74091ccc1eec4 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/pubsub.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/pubsub.rst @@ -41,7 +41,7 @@ Creating a PubSub topic The PubSub topic is a named resource to which messages are sent by publishers. The :class:`~airflow.providers.google.cloud.operators.pubsub.PubSubCreateTopicOperator` operator creates a topic. -.. exampleinclude:: /../../tests/system/providers/google/cloud/pubsub/example_pubsub.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/pubsub/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_create_topic] :end-before: [END howto_operator_gcp_pubsub_create_topic] @@ -56,7 +56,7 @@ A ``Subscription`` is a named resource representing the stream of messages from to be delivered to the subscribing application. 
The :class:`~airflow.providers.google.cloud.operators.pubsub.PubSubCreateSubscriptionOperator` operator creates the subscription. -.. exampleinclude:: /../../tests/system/providers/google/cloud/pubsub/example_pubsub.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/pubsub/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_create_subscription] :end-before: [END howto_operator_gcp_pubsub_create_subscription] @@ -70,7 +70,7 @@ Publishing PubSub messages A ``Message`` is a combination of data and (optional) attributes that a publisher sends to a topic and is eventually delivered to subscribers. The :class:`~airflow.providers.google.cloud.operators.pubsub.PubSubPublishMessageOperator` operator would publish messages. -.. exampleinclude:: /../../tests/system/providers/google/cloud/pubsub/example_pubsub.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/pubsub/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_publish] :end-before: [END howto_operator_gcp_pubsub_publish] @@ -83,32 +83,32 @@ Pulling messages from a PubSub subscription The :class:`~airflow.providers.google.cloud.sensors.pubsub.PubSubPullSensor` sensor pulls messages from a PubSub subscription and pass them through XCom. -.. exampleinclude:: /../../tests/system/providers/google/cloud/pubsub/example_pubsub.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/pubsub/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_pull_message_with_sensor] :end-before: [END howto_operator_gcp_pubsub_pull_message_with_sensor] Also for this action you can use sensor in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/pubsub/example_pubsub_deferrable.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/pubsub/example_pubsub_deferrable.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_pubsub_pull_message_with_async_sensor] :end-before: [END howto_operator_gcp_pubsub_pull_message_with_async_sensor] -.. exampleinclude:: /../../tests/system/providers/google/cloud/pubsub/example_pubsub.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/pubsub/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_pull_message_with_operator] :end-before: [END howto_operator_gcp_pubsub_pull_message_with_operator] To pull messages from XCom use the :class:`~airflow.providers.standard.operators.bash.BashOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/pubsub/example_pubsub.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/pubsub/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_pull_messages_result_cmd] :end-before: [END howto_operator_gcp_pubsub_pull_messages_result_cmd] -.. exampleinclude:: /../../tests/system/providers/google/cloud/pubsub/example_pubsub.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/pubsub/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_pull_messages_result] :end-before: [END howto_operator_gcp_pubsub_pull_messages_result] @@ -121,7 +121,7 @@ Deleting a PubSub subscription The :class:`~airflow.providers.google.cloud.operators.pubsub.PubSubDeleteSubscriptionOperator` operator deletes the subscription. -.. exampleinclude:: /../../tests/system/providers/google/cloud/pubsub/example_pubsub.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/pubsub/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_unsubscribe] :end-before: [END howto_operator_gcp_pubsub_unsubscribe] @@ -134,7 +134,7 @@ Deleting a PubSub topic The :class:`~airflow.providers.google.cloud.operators.pubsub.PubSubDeleteTopicOperator` operator deletes topic. -.. exampleinclude:: /../../tests/system/providers/google/cloud/pubsub/example_pubsub.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/pubsub/example_pubsub.py :language: python :start-after: [START howto_operator_gcp_pubsub_delete_topic] :end-before: [END howto_operator_gcp_pubsub_delete_topic] diff --git a/docs/apache-airflow-providers-google/operators/cloud/spanner.rst b/docs/apache-airflow-providers-google/operators/cloud/spanner.rst index b505fbe187f6c..fbb79bbf68eb3 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/spanner.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/spanner.rst @@ -41,7 +41,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_deploy] @@ -50,7 +50,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/spanner.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/spanner.py :language: python :dedent: 4 :start-after: [START gcp_spanner_deploy_template_fields] @@ -80,7 +80,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_database_delete] @@ -89,7 +89,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/spanner.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/spanner.py :language: python :dedent: 4 :start-after: [START gcp_spanner_delete_template_fields] @@ -120,7 +120,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_database_deploy] @@ -129,7 +129,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/spanner.py +.. 
literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/spanner.py :language: python :dedent: 4 :start-after: [START gcp_spanner_database_deploy_template_fields] @@ -164,13 +164,13 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_database_update] :end-before: [END howto_operator_spanner_database_update] -.. exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_database_update_idempotent] @@ -179,7 +179,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/spanner.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/spanner.py :language: python :dedent: 4 :start-after: [START gcp_spanner_database_update_template_fields] @@ -207,7 +207,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_query] @@ -216,7 +216,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/spanner.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/spanner.py :language: python :dedent: 4 :start-after: [START gcp_spanner_query_template_fields] @@ -246,7 +246,7 @@ Using the operator You can create the operator with or without project id. If project id is missing it will be retrieved from the Google Cloud connection used. Both variants are shown: -.. exampleinclude:: /../../tests/system/providers/google/cloud/spanner/example_spanner.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/spanner/example_spanner.py :language: python :dedent: 4 :start-after: [START howto_operator_spanner_delete] @@ -255,7 +255,7 @@ it will be retrieved from the Google Cloud connection used. Both variants are sh Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/spanner.py +.. 
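
A minimal, hypothetical instantiation of the Spanner query operator discussed above; the instance, database, and SQL are placeholders:

.. code-block:: python

    from airflow.providers.google.cloud.operators.spanner import (
        SpannerQueryDatabaseInstanceOperator,
    )

    # Executes DML against an existing Cloud Spanner database. The ``query``
    # argument is templated and may also be a list of statements.
    spanner_query = SpannerQueryDatabaseInstanceOperator(
        task_id="spanner_query",
        instance_id="my-instance",  # placeholder
        database_id="my-database",  # placeholder
        query="DELETE FROM my_table WHERE true",
        # project_id is optional; it falls back to the Google Cloud connection.
    )
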
literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/spanner.py :language: python :dedent: 4 :start-after: [START gcp_spanner_delete_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/speech_to_text.rst b/docs/apache-airflow-providers-google/operators/cloud/speech_to_text.rst index 006c21657c098..3114a359de96b 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/speech_to_text.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/speech_to_text.rst @@ -42,14 +42,14 @@ google.cloud.speech_v1.types module for more information, see: https://googleapis.github.io/google-cloud-python/latest/speech/gapic/v1/api.html#google.cloud.speech_v1.SpeechClient.recognize -.. exampleinclude:: /../../tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/speech_to_text/example_speech_to_text.py :language: python :start-after: [START howto_operator_text_to_speech_api_arguments] :end-before: [END howto_operator_text_to_speech_api_arguments] filename is a simple string argument: -.. exampleinclude:: /../../tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/speech_to_text/example_speech_to_text.py :language: python :start-after: [START howto_operator_speech_to_text_api_arguments] :end-before: [END howto_operator_speech_to_text_api_arguments] @@ -57,7 +57,7 @@ filename is a simple string argument: Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/speech_to_text/example_speech_to_text.py :language: python :dedent: 4 :start-after: [START howto_operator_speech_to_text_recognize] @@ -66,7 +66,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/speech_to_text.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/speech_to_text.py :language: python :dedent: 4 :start-after: [START gcp_speech_to_text_synthesize_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/stackdriver.rst b/docs/apache-airflow-providers-google/operators/cloud/stackdriver.rst index ecad902d4e525..04b732f6ce9c8 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/stackdriver.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/stackdriver.rst @@ -40,7 +40,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_list_alert_policy] @@ -60,7 +60,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py +.. 
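
Before moving deeper into Stackdriver, a small hypothetical sketch of the Speech-to-Text recognize call described above; the GCS URI is a placeholder, and plain dicts can stand in for the ``google.cloud.speech_v1`` types:

.. code-block:: python

    from google.cloud.speech_v1 import RecognitionAudio, RecognitionConfig

    from airflow.providers.google.cloud.operators.speech_to_text import (
        CloudSpeechToTextRecognizeSpeechOperator,
    )

    recognize_speech = CloudSpeechToTextRecognizeSpeechOperator(
        task_id="recognize_speech",
        config=RecognitionConfig({"encoding": "LINEAR16", "language_code": "en-US"}),
        audio=RecognitionAudio({"uri": "gs://my-bucket/my-file.flac"}),  # placeholder
    )
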
exampleinclude:: /../../providers/tests/system/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_enable_alert_policy] @@ -80,7 +80,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_disable_alert_policy] @@ -101,7 +101,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_upsert_alert_policy] @@ -120,7 +120,7 @@ Using the operator The name of the alert to be deleted should be given in the format projects//alertPolicies/ -.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_delete_alert_policy] @@ -140,7 +140,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_list_notification_channel] @@ -160,7 +160,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_enable_notification_channel] @@ -180,7 +180,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_disable_notification_channel] @@ -201,7 +201,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py +.. 
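
A minimal hypothetical use of the notification-channel operators covered above; the monitoring filter expression is illustrative:

.. code-block:: python

    from airflow.providers.google.cloud.operators.stackdriver import (
        StackdriverEnableNotificationChannelsOperator,
    )

    # Enables every notification channel matching the filter; project_id is
    # optional and falls back to the Google Cloud connection used.
    enable_channels = StackdriverEnableNotificationChannelsOperator(
        task_id="enable_notification_channels",
        filter_='type="slack"',  # illustrative filter
    )
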
exampleinclude:: /../../providers/tests/system/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_disable_notification_channel] @@ -220,7 +220,7 @@ Using the operator You can use this operator with or without project id to fetch all the alert policies. If project id is missing it will be retrieved from Google Cloud connection used. -.. exampleinclude:: /../../tests/system/providers/google/cloud/stackdriver/example_stackdriver.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/stackdriver/example_stackdriver.py :language: python :dedent: 4 :start-after: [START howto_operator_gcp_stackdriver_delete_notification_channel] diff --git a/docs/apache-airflow-providers-google/operators/cloud/tasks.rst b/docs/apache-airflow-providers-google/operators/cloud/tasks.rst index af9f03bc33b77..c67a0b77d9715 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/tasks.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/tasks.rst @@ -41,7 +41,7 @@ Create queue To create a new Queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueueCreateOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START create_queue] @@ -55,7 +55,7 @@ Delete queue To delete a Queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueueDeleteOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START delete_queue] @@ -70,7 +70,7 @@ Resume queue To resume a Queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueueResumeOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START resume_queue] @@ -84,7 +84,7 @@ Pause queue To pause a Queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueuePauseOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START pause_queue] @@ -98,7 +98,7 @@ Purge queue To purge a Queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueuePurgeOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START purge_queue] @@ -112,7 +112,7 @@ Get queue To get a Queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueueGetOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START get_queue] @@ -126,7 +126,7 @@ Update queue To update a Queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueueUpdateOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START update_queue] @@ -140,7 +140,7 @@ List queues To list all Queues, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksQueuesListOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_queue.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_queue.py :language: python :dedent: 4 :start-after: [START list_queue] @@ -158,7 +158,7 @@ Create task To create a new Task in a particular queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksTaskCreateOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_tasks.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_tasks.py :language: python :dedent: 4 :start-after: [START create_task] @@ -172,7 +172,7 @@ Get task To get the Tasks in a particular queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksTaskGetOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_tasks.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_tasks.py :language: python :dedent: 4 :start-after: [START tasks_get] @@ -186,7 +186,7 @@ Run task To run the Task in a particular queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksTaskRunOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_tasks.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_tasks.py :language: python :dedent: 4 :start-after: [START run_task] @@ -200,7 +200,7 @@ List tasks To list all Tasks in a particular queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksTasksListOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_tasks.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_tasks.py :language: python :dedent: 4 :start-after: [START list_tasks] @@ -214,7 +214,7 @@ Delete task To delete the Task from a particular queue, use :class:`~airflow.providers.google.cloud.operators.tasks.CloudTasksTaskDeleteOperator` -.. exampleinclude:: /../../tests/system/providers/google/cloud/tasks/example_tasks.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/tasks/example_tasks.py :language: python :dedent: 4 :start-after: [START create_task] diff --git a/docs/apache-airflow-providers-google/operators/cloud/text_to_speech.rst b/docs/apache-airflow-providers-google/operators/cloud/text_to_speech.rst index d200e902b3f94..78bf9e5eaed6e 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/text_to_speech.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/text_to_speech.rst @@ -42,14 +42,14 @@ The ``input``, ``voice`` and ``audio_config`` arguments need to be dicts or obje for more information, see: https://googleapis.github.io/google-cloud-python/latest/texttospeech/gapic/v1/api.html#google.cloud.texttospeech_v1.TextToSpeechClient.synthesize_speech -.. exampleinclude:: /../../tests/system/providers/google/cloud/text_to_speech/example_text_to_speech.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/text_to_speech/example_text_to_speech.py :language: python :start-after: [START howto_operator_text_to_speech_api_arguments] :end-before: [END howto_operator_text_to_speech_api_arguments] The ``filename`` argument is a simple string argument: -.. 
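
Putting the queue and task operators above together, a hypothetical sketch; the location, queue name, and HTTP target are placeholders, and the payloads follow ``google.cloud.tasks_v2.types``:

.. code-block:: python

    from google.cloud.tasks_v2.types import Queue

    from airflow.providers.google.cloud.operators.tasks import (
        CloudTasksQueueCreateOperator,
        CloudTasksTaskCreateOperator,
    )

    LOCATION = "europe-west2"  # placeholder

    create_queue = CloudTasksQueueCreateOperator(
        task_id="create_queue",
        location=LOCATION,
        queue_name="my-queue",  # placeholder
        task_queue=Queue(),
    )
    create_task = CloudTasksTaskCreateOperator(
        task_id="create_task",
        location=LOCATION,
        queue_name="my-queue",
        # A minimal HTTP-target task; see google.cloud.tasks_v2.types.Task.
        task={"http_request": {"http_method": "POST", "url": "https://example.com/handler"}},
    )
    create_queue >> create_task
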
exampleinclude:: /../../tests/system/providers/google/cloud/text_to_speech/example_text_to_speech.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/text_to_speech/example_text_to_speech.py :language: python :start-after: [START howto_operator_text_to_speech_gcp_filename] :end-before: [END howto_operator_text_to_speech_gcp_filename] @@ -57,7 +57,7 @@ The ``filename`` argument is a simple string argument: Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/text_to_speech/example_text_to_speech.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/text_to_speech/example_text_to_speech.py :language: python :dedent: 4 :start-after: [START howto_operator_text_to_speech_synthesize] @@ -66,7 +66,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/text_to_speech.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/text_to_speech.py :language: python :dedent: 4 :start-after: [START gcp_text_to_speech_synthesize_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/translate.rst b/docs/apache-airflow-providers-google/operators/cloud/translate.rst index 518d7d28f72be..579236cb0883c 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/translate.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/translate.rst @@ -40,7 +40,7 @@ Using the operator Basic usage of the operator: -.. exampleinclude:: /../../tests/system/providers/google/cloud/translate/example_translate.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/translate/example_translate.py :language: python :dedent: 4 :start-after: [START howto_operator_translate_text] @@ -49,7 +49,7 @@ Basic usage of the operator: The result of translation is available as dictionary or array of dictionaries accessible via the usual XCom mechanisms of Airflow: -.. exampleinclude:: /../../tests/system/providers/google/cloud/translate/example_translate.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/translate/example_translate.py :language: python :dedent: 4 :start-after: [START howto_operator_translate_access] @@ -59,7 +59,7 @@ XCom mechanisms of Airflow: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/translate.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/translate.py :language: python :dedent: 4 :start-after: [START translate_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/translate_speech.rst b/docs/apache-airflow-providers-google/operators/cloud/translate_speech.rst index c02f45c252167..d1043d0e51cf3 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/translate_speech.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/translate_speech.rst @@ -43,7 +43,7 @@ for more information, see: https://googleapis.github.io/google-cloud-python/late Arguments for translation need to be specified. -.. exampleinclude:: /../../tests/system/providers/google/cloud/translate_speech/example_translate_speech.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/translate_speech/example_translate_speech.py :language: python :start-after: [START howto_operator_translate_speech_arguments] :end-before: [END howto_operator_translate_speech_arguments] @@ -52,7 +52,7 @@ Arguments for translation need to be specified. Using the operator """""""""""""""""" -.. 
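
A compact, hypothetical sketch of the translate operator described above; the input values and languages are placeholders:

.. code-block:: python

    from airflow.providers.google.cloud.operators.translate import CloudTranslateTextOperator

    # Translates a list of strings; the result lands in XCom as noted above.
    translate = CloudTranslateTextOperator(
        task_id="translate",
        values=["zażółć gęślą jaźń"],  # placeholder input
        target_language="en",
        format_="text",
        source_language=None,  # let the API detect the source language
        model="base",
    )
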
exampleinclude:: /../../tests/system/providers/google/cloud/translate_speech/example_translate_speech.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/translate_speech/example_translate_speech.py :language: python :dedent: 4 :start-after: [START howto_operator_translate_speech] @@ -61,7 +61,7 @@ Using the operator Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/translate_speech.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/translate_speech.py :language: python :dedent: 4 :start-after: [START translate_speech_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/vertex_ai.rst b/docs/apache-airflow-providers-google/operators/cloud/vertex_ai.rst index 8fb76cd80fdee..b6bc94620ff2d 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/vertex_ai.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/vertex_ai.rst @@ -33,7 +33,7 @@ To create a Google VertexAI dataset you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.CreateDatasetOperator`. The operator returns dataset id in :ref:`XCom ` under ``dataset_id`` key. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_dataset_operator] @@ -42,7 +42,7 @@ The operator returns dataset id in :ref:`XCom ` under ``dataset_i After creating a dataset you can use it to import some data using :class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.ImportDataOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_import_data_operator] @@ -51,7 +51,7 @@ After creating a dataset you can use it to import some data using To export dataset you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.ExportDataOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_export_data_operator] @@ -60,7 +60,7 @@ To export dataset you can use To delete dataset you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.DeleteDatasetOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_delete_dataset_operator] @@ -69,7 +69,7 @@ To delete dataset you can use To get dataset you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.GetDatasetOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py +.. 
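
A hypothetical sketch of dataset creation as described above; the region, project, and dataset payload are placeholders, and the exact ``metadata_schema_uri`` depends on the data type:

.. code-block:: python

    from google.cloud.aiplatform import schema
    from google.protobuf.struct_pb2 import Value

    from airflow.providers.google.cloud.operators.vertex_ai.dataset import CreateDatasetOperator

    create_image_dataset = CreateDatasetOperator(
        task_id="create_image_dataset",
        project_id="my-project",  # placeholder
        region="us-central1",     # placeholder
        dataset={
            "display_name": "my-image-dataset",
            "metadata_schema_uri": schema.dataset.metadata.image,
            "metadata": Value(string_value="image-dataset"),
        },
    )
    # The created dataset id is then available via XCom under the ``dataset_id`` key.
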
exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_get_dataset_operator] @@ -78,7 +78,7 @@ To get dataset you can use To get a dataset list you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.ListDatasetsOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_list_dataset_operator] @@ -87,7 +87,7 @@ To get a dataset list you can use To update dataset you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.dataset.UpdateDatasetOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_update_dataset_operator] @@ -115,7 +115,7 @@ create image you can find by this link: https://cloud.google.com/vertex-ai/docs/ After that you should put a link to the image in the ``container_uri`` parameter. You can also provide, in the ``command`` parameter, the command to execute in the container created from this image. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_container.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_container.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_custom_container_training_job_operator] @@ -124,7 +124,7 @@ for container which will be created from this image in ``command`` parameter. The :class:`~airflow.providers.google.cloud.operators.vertex_ai.custom_job.CreateCustomContainerTrainingJobOperator` also provides the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_container.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_container.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_custom_container_training_job_operator_deferrable] @@ -138,7 +138,7 @@ create you can find by this link: https://cloud.google.com/vertex-ai/docs/traini Next you should put a link to the package in the ``python_package_gcs_uri`` parameter, and the ``python_module_name`` parameter should have the name of the script that will run your training task. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_custom_python_package_training_job_operator] @@ -147,7 +147,7 @@ parameter should have the name of the script that will run your training task. The :class:`~airflow.providers.google.cloud.operators.vertex_ai.custom_job.CreateCustomPythonPackageTrainingJobOperator` also provides the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_custom_python_package_training_job_operator_deferrable] @@ -158,7 +158,7 @@ How to run a Custom Training Job To create and run a Custom Training Job you should put the path to your local training script inside the ``script_path`` parameter. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_custom_training_job_operator] @@ -166,7 +166,7 @@ To create and run a Custom Training Job you should put the path to your local tr The same operation can be performed in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_custom_training_job_operator_deferrable] @@ -176,7 +176,7 @@ Additionally, you can create a new version of an existing Custom Training Job. I Model with another version, instead of creating a new Model in the Model Registry. This can be done by specifying the ``parent_model`` parameter when running a Custom Training Job. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_custom_training_job_v2_operator] @@ -184,7 +184,7 @@ This can be done by specifying the ``parent_model`` parameter when running a Cus The same operation can be performed in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_custom_training_job_v2_deferrable_operator] @@ -194,7 +194,7 @@ The same operation can be performed in the deferrable mode: You can get a list of Training Jobs using :class:`~airflow.providers.google.cloud.operators.vertex_ai.custom_job.ListCustomTrainingJobOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_list_custom_training_job_operator] @@ -203,7 +203,7 @@ You can get a list of Training Jobs using If you wish to delete a Custom Training Job you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.custom_job.DeleteCustomTrainingJobOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job.py +.. 
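
A trimmed, hypothetical configuration of the custom training job operator described above; the bucket, script path, container images, and dataset id are all placeholders:

.. code-block:: python

    from airflow.providers.google.cloud.operators.vertex_ai.custom_job import (
        CreateCustomTrainingJobOperator,
    )

    create_custom_training_job = CreateCustomTrainingJobOperator(
        task_id="create_custom_training_job",
        project_id="my-project",                  # placeholder
        region="us-central1",                     # placeholder
        staging_bucket="gs://my-staging-bucket",  # placeholder
        display_name="my-training-job",
        script_path="dags/scripts/training_script.py",  # local training script
        container_uri="my-training-image:latest",       # placeholder image
        requirements=["scikit-learn"],
        model_serving_container_image_uri="my-serving-image:latest",  # placeholder image
        replica_count=1,
        dataset_id="my-dataset-id",               # placeholder
        model_display_name="my-model",
    )
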
exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_delete_custom_training_job_operator] @@ -227,7 +227,7 @@ How to run AutoML Forecasting Training Job Before you start running this Job you must prepare and create a ``TimeSeries`` dataset. After that you should pass the dataset id to the ``dataset_id`` parameter of the operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_auto_ml_forecasting_training_job_operator] @@ -239,7 +239,7 @@ How to run AutoML Image Training Job Before you start running this Job you must prepare and create an ``Image`` dataset. After that you should pass the dataset id to the ``dataset_id`` parameter of the operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_auto_ml_image_training_job_operator] @@ -251,7 +251,7 @@ How to run AutoML Tabular Training Job Before you start running this Job you must prepare and create a ``Tabular`` dataset. After that you should pass the dataset id to the ``dataset_id`` parameter of the operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_auto_ml_tabular_training_job_operator] @@ -274,7 +274,7 @@ How to run AutoML Video Training Job Before you start running this Job you must prepare and create a ``Video`` dataset. After that you should pass the dataset id to the ``dataset_id`` parameter of the operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_auto_ml_video_training_job_operator] @@ -284,7 +284,7 @@ Additionally, you can create new version of existing AutoML Video Training Job. I version of existing Model instead of new Model created in Model Registry. This can be done by specifying the ``parent_model`` parameter when running an AutoML Video Training Job. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_auto_ml_video_training_job_v2_operator] @@ -293,7 +293,7 @@ version of existing Model instead of new Model created in Model Registry. This c You can get a list of AutoML Training Jobs using :class:`~airflow.providers.google.cloud.operators.vertex_ai.auto_ml.ListAutoMLTrainingJobOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_list_auto_ml_training_job_operator] @@ -302,7 +302,7 @@ You can get a list of AutoML Training Jobs using If you wish to delete an AutoML Training Job you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.auto_ml.DeleteAutoMLTrainingJobOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_delete_auto_ml_training_job_operator] @@ -315,7 +315,7 @@ To create a Google VertexAI Batch Prediction Job you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.batch_prediction_job.CreateBatchPredictionJobOperator`. The operator returns batch prediction job id in :ref:`XCom ` under ``batch_prediction_job_id`` key. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_batch_prediction_job_operator] @@ -324,7 +324,7 @@ The operator returns batch prediction job id in :ref:`XCom ` unde The :class:`~airflow.providers.google.cloud.operators.vertex_ai.batch_prediction_job.CreateBatchPredictionJobOperator` also provides deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_batch_prediction_job_operator_def] @@ -334,7 +334,7 @@ also provides deferrable mode: To delete batch prediction job you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.batch_prediction_job.DeleteBatchPredictionJobOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_delete_batch_prediction_job_operator] @@ -343,7 +343,7 @@ To delete batch prediction job you can use To get a batch prediction job list you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.batch_prediction_job.ListBatchPredictionJobsOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_list_batch_prediction_job_operator] @@ -356,7 +356,7 @@ To create a Google VertexAI endpoint you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.endpoint_service.CreateEndpointOperator`. The operator returns endpoint id in :ref:`XCom ` under ``endpoint_id`` key. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py +.. 
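
For completeness, a minimal hypothetical use of the batch-prediction listing operator mentioned above; the region and project are placeholders:

.. code-block:: python

    from airflow.providers.google.cloud.operators.vertex_ai.batch_prediction_job import (
        ListBatchPredictionJobsOperator,
    )

    # Returns the batch prediction jobs in the given region of the project.
    list_batch_prediction_jobs = ListBatchPredictionJobsOperator(
        task_id="list_batch_prediction_jobs",
        project_id="my-project",  # placeholder
        region="us-central1",     # placeholder
    )
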
exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_endpoint_operator] @@ -365,7 +365,7 @@ The operator returns endpoint id in :ref:`XCom ` under ``endpoint After creating an endpoint you can use it to deploy a model using :class:`~airflow.providers.google.cloud.operators.vertex_ai.endpoint_service.DeployModelOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_deploy_model_operator] @@ -374,7 +374,7 @@ After creating an endpoint you can use it to deploy a model using To undeploy a model you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.endpoint_service.UndeployModelOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_undeploy_model_operator] @@ -383,7 +383,7 @@ To undeploy a model you can use To delete endpoint you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.endpoint_service.DeleteEndpointOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_delete_endpoint_operator] @@ -392,7 +392,7 @@ To delete endpoint you can use To get an endpoint list you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.endpoint_service.ListEndpointsOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_list_endpoints_operator] @@ -405,7 +405,7 @@ To create a Google VertexAI hyperparameter tuning job you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.hyperparameter_tuning_job.CreateHyperparameterTuningJobOperator`. The operator returns hyperparameter tuning job id in :ref:`XCom ` under ``hyperparameter_tuning_job_id`` key. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_hyperparameter_tuning_job_operator] @@ -414,7 +414,7 @@ The operator returns hyperparameter tuning job id in :ref:`XCom ` :class:`~airflow.providers.google.cloud.operators.vertex_ai.hyperparameter_tuning_job.CreateHyperparameterTuningJobOperator` also supports deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py +.. 
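
A hypothetical sketch chaining endpoint creation and model deployment as described above; the model resource name and machine spec are placeholders:

.. code-block:: python

    from airflow.providers.google.cloud.operators.vertex_ai.endpoint_service import (
        CreateEndpointOperator,
        DeployModelOperator,
    )

    PROJECT_ID, REGION = "my-project", "us-central1"  # placeholders

    create_endpoint = CreateEndpointOperator(
        task_id="create_endpoint",
        project_id=PROJECT_ID,
        region=REGION,
        endpoint={"display_name": "my-endpoint"},
    )
    deploy_model = DeployModelOperator(
        task_id="deploy_model",
        project_id=PROJECT_ID,
        region=REGION,
        # The endpoint id was pushed to XCom under the ``endpoint_id`` key.
        endpoint_id="{{ ti.xcom_pull(task_ids='create_endpoint', key='endpoint_id') }}",
        deployed_model={
            "model": f"projects/{PROJECT_ID}/locations/{REGION}/models/123456",  # placeholder
            "display_name": "my-deployed-model",
            "dedicated_resources": {
                "machine_spec": {"machine_type": "n1-standard-2"},
                "min_replica_count": 1,
                "max_replica_count": 1,
            },
        },
        traffic_split={"0": 100},
    )
    create_endpoint >> deploy_model
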
exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_create_hyperparameter_tuning_job_operator_deferrable] @@ -423,7 +423,7 @@ also supports deferrable mode: To delete hyperparameter tuning job you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.hyperparameter_tuning_job.DeleteHyperparameterTuningJobOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_delete_hyperparameter_tuning_job_operator] @@ -432,7 +432,7 @@ To delete hyperparameter tuning job you can use To get hyperparameter tuning job you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.hyperparameter_tuning_job.GetHyperparameterTuningJobOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_get_hyperparameter_tuning_job_operator] @@ -441,7 +441,7 @@ To get hyperparameter tuning job you can use To get a hyperparameter tuning job list you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.hyperparameter_tuning_job.ListHyperparameterTuningJobOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_list_hyperparameter_tuning_job_operator] @@ -454,7 +454,7 @@ To upload a Google VertexAI model you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.UploadModelOperator`. The operator returns model id in :ref:`XCom ` under ``model_id`` key. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_upload_model_operator] @@ -463,7 +463,7 @@ The operator returns model id in :ref:`XCom ` under ``model_id`` To export model you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.ExportModelOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_export_model_operator] @@ -472,7 +472,7 @@ To export model you can use To delete model you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.DeleteModelOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_delete_model_operator] @@ -481,7 +481,7 @@ To delete model you can use To get a model list you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.ListModelsOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_list_models_operator] @@ -490,7 +490,7 @@ To get a model list you can use To retrieve model by its ID you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.GetModelOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_get_model_operator] @@ -499,7 +499,7 @@ To retrieve model by its ID you can use To list all model versions you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.ListModelVersionsOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_list_model_versions_operator] @@ -508,7 +508,7 @@ To list all model versions you can use To set a specific version of model as a default one you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.SetDefaultVersionOnModelOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_set_version_as_default_operator] @@ -517,7 +517,7 @@ To set a specific version of model as a default one you can use To add aliases to specific version of model you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.AddVersionAliasesOnModelOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_add_version_aliases_operator] @@ -526,7 +526,7 @@ To add aliases to specific version of model you can use To delete aliases from specific version of model you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.DeleteVersionAliasesOnModelOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py +.. 
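
A short hypothetical sketch of model retrieval as covered above; the model id is a placeholder:

.. code-block:: python

    from airflow.providers.google.cloud.operators.vertex_ai.model_service import GetModelOperator

    get_model = GetModelOperator(
        task_id="get_model",
        project_id="my-project",  # placeholder
        region="us-central1",     # placeholder
        model_id="1234567890",    # placeholder model id
    )
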
exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_delete_version_aliases_operator] @@ -535,7 +535,7 @@ To delete aliases from specific version of model you can use To delete specific version of model you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.model_service.DeleteModelVersionOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_delete_version_operator] @@ -548,7 +548,7 @@ To run a Google VertexAI Pipeline Job you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.pipeline_job.RunPipelineJobOperator`. The operator returns pipeline job id in :ref:`XCom ` under ``pipeline_job_id`` key. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_run_pipeline_job_operator] @@ -557,7 +557,7 @@ The operator returns pipeline job id in :ref:`XCom ` under ``pipe To delete pipeline job you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.pipeline_job.DeletePipelineJobOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_delete_pipeline_job_operator] @@ -566,7 +566,7 @@ To delete pipeline job you can use To get pipeline job you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.pipeline_job.GetPipelineJobOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_get_pipeline_job_operator] @@ -575,7 +575,7 @@ To get pipeline job you can use To get a pipeline job list you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.pipeline_job.ListPipelineJobOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_list_pipeline_job_operator] @@ -588,7 +588,7 @@ To generate a prediction via language model you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.generative_model.TextGenerationModelPredictOperator`. The operator returns the model's response in :ref:`XCom ` under ``model_response`` key. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_text_generation_model_predict_operator] @@ -598,7 +598,7 @@ To generate text embeddings you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.generative_model.TextEmbeddingModelGetEmbeddingsOperator`. The operator returns the model's response in :ref:`XCom ` under ``model_response`` key. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_text_embedding_model_get_embeddings_operator] @@ -608,7 +608,7 @@ To generate content with a generative model you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.generative_model.GenerativeModelGenerateContentOperator`. The operator returns the model's response in :ref:`XCom ` under ``model_response`` key. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_generative_model_generate_content_operator] @@ -618,7 +618,7 @@ To run a supervised fine tuning job you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.generative_model.SupervisedFineTuningTrainOperator`. The operator returns the tuned model's endpoint name in :ref:`XCom ` under ``tuned_model_endpoint_name`` key. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_supervised_fine_tuning_train_operator] @@ -629,7 +629,7 @@ To calculate the number of input tokens before sending a request to the Gemini :class:`~airflow.providers.google.cloud.operators.vertex_ai.generative_model.CountTokensOperator`. The operator returns the total tokens in :ref:`XCom ` under ``total_tokens`` key. -.. exampleinclude:: /../../tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_count_tokens_operator] @@ -639,7 +639,7 @@ To evaluate a model you can use :class:`~airflow.providers.google.cloud.operators.vertex_ai.generative_model.RunEvaluationOperator`. The operator returns the evaluation summary metrics in :ref:`XCom ` under ``summary_metrics`` key. -.. 
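
A hypothetical sketch of content generation as described above; the model name and prompt are placeholders, and the accepted parameters vary with the provider version:

.. code-block:: python

    from airflow.providers.google.cloud.operators.vertex_ai.generative_model import (
        GenerativeModelGenerateContentOperator,
    )

    generate_content = GenerativeModelGenerateContentOperator(
        task_id="generate_content",
        project_id="my-project",  # placeholder
        location="us-central1",   # placeholder
        contents=["Summarize the plot of 'The Wizard of Oz' in one sentence."],
        pretrained_model="gemini-1.5-pro",  # placeholder model name
    )
    # The model's reply is pushed to XCom under the ``model_response`` key.
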
exampleinclude:: /../../providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model.py :language: python :dedent: 4 :start-after: [START how_to_cloud_vertex_ai_run_evaluation_operator] diff --git a/docs/apache-airflow-providers-google/operators/cloud/video_intelligence.rst b/docs/apache-airflow-providers-google/operators/cloud/video_intelligence.rst index 6b7bd2412e6ef..ea9ca48bd04ae 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/video_intelligence.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/video_intelligence.rst @@ -38,12 +38,12 @@ Using the operator The input URI is a URI to a file in Google Cloud Storage -.. exampleinclude:: /../../tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py :language: python :start-after: [START howto_operator_video_intelligence_other_args] :end-before: [END howto_operator_video_intelligence_other_args] -.. exampleinclude:: /../../tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py :language: python :dedent: 4 :start-after: [START howto_operator_video_intelligence_detect_labels] :end-before: [END howto_operator_video_intelligence_detect_labels] You can use the annotation output via XCom: -.. exampleinclude:: /../../tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py :language: python :dedent: 4 :start-after: [START howto_operator_video_intelligence_detect_labels_result] :end-before: [END howto_operator_video_intelligence_detect_labels_result] @@ -60,7 +60,7 @@ You can use the annotation output via XCom: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/video_intelligence.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/video_intelligence.py :language: python :dedent: 4 :start-after: [START gcp_video_intelligence_detect_labels_template_fields] @@ -87,7 +87,7 @@ Arguments The input URI is a URI to a file in Google Cloud Storage -.. exampleinclude:: /../../tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py :language: python :start-after: [START howto_operator_video_intelligence_other_args] :end-before: [END howto_operator_video_intelligence_other_args] @@ -95,7 +95,7 @@ The input URI is a URI to a file in Google Cloud Storage Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py :language: python :dedent: 4 :start-after: [START howto_operator_video_intelligence_detect_explicit_content] :end-before: [END howto_operator_video_intelligence_detect_explicit_content] You can use the annotation output via XCom: -.. exampleinclude:: /../../tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py +.. 
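
A minimal hypothetical use of the label-detection operator above; the input URI is a placeholder:

.. code-block:: python

    from airflow.providers.google.cloud.operators.video_intelligence import (
        CloudVideoIntelligenceDetectVideoLabelsOperator,
    )

    detect_video_labels = CloudVideoIntelligenceDetectVideoLabelsOperator(
        task_id="detect_video_labels",
        input_uri="gs://my-bucket/my-video.mp4",  # placeholder
        output_uri=None,  # return the annotation inline instead of writing to GCS
        video_context=None,
        timeout=5,
    )
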
exampleinclude:: /../../providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py :language: python :dedent: 4 :start-after: [START howto_operator_video_intelligence_detect_explicit_content_result] @@ -112,7 +112,7 @@ You can use the annotation output via XCom: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/video_intelligence.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/video_intelligence.py :language: python :dedent: 4 :start-after: [START gcp_video_intelligence_detect_explicit_content_template_fields] @@ -139,7 +139,7 @@ Arguments Input URI is a URI to a file in Google Cloud Storage -.. exampleinclude:: /../../tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py :language: python :start-after: [START howto_operator_video_intelligence_other_args] :end-before: [END howto_operator_video_intelligence_other_args] @@ -147,7 +147,7 @@ Input URI is a URI to a file in Google Cloud Storage Using the operator """""""""""""""""" -.. exampleinclude:: /../../tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py :language: python :dedent: 4 :start-after: [START howto_operator_video_intelligence_detect_video_shots] @@ -155,7 +155,7 @@ Using the operator You can use the annotation output via XCom: -.. exampleinclude:: /../../tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py :language: python :dedent: 4 :start-after: [START howto_operator_video_intelligence_detect_video_shots_result] @@ -164,7 +164,7 @@ You can use the annotation output via XCom: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/video_intelligence.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/video_intelligence.py :language: python :dedent: 4 :start-after: [START gcp_video_intelligence_detect_video_shots_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/vision.rst b/docs/apache-airflow-providers-google/operators/cloud/vision.rst index 393f637dc3c55..897b4ead4f16d 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/vision.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/vision.rst @@ -42,17 +42,17 @@ We are using the :class:`~google.cloud.vision_v1.types.Product`, :class:`~google.cloud.vision_v1.types.ProductSet` and :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_product_set_import] :end-before: [END howto_operator_vision_product_set_import] -.. 
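For the add-product-to-product-set operation covered in this Vision section, a minimal sketch follows; the location and the two ids are hypothetical placeholders (in practice they are often XCom-templated values from earlier create tasks).

.. code-block:: python

    from airflow.providers.google.cloud.operators.vision import (
        CloudVisionAddProductToProductSetOperator,
    )

    add_product_to_product_set = CloudVisionAddProductToProductSetOperator(
        task_id="add_product_to_product_set",
        location="europe-west1",
        product_set_id="my-product-set",  # assumption: placeholder id
        product_id="my-product",  # assumption: placeholder id
    )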
exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_product_import] :end-before: [END howto_operator_vision_product_import] @@ -60,7 +60,7 @@ Google libraries: If ``product_set_id`` and ``product_id`` were generated by the API they can be extracted from XCOM: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_add_product_to_product_set] @@ -68,7 +68,7 @@ If ``product_set_id`` and ``product_id`` were generated by the API they can be extr Otherwise it can be specified explicitly: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_add_product_to_product_set_2] @@ -78,7 +78,7 @@ Otherwise it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_add_product_to_product_set_template_fields] @@ -107,18 +107,18 @@ Using the operator We are using the :class:`~google.cloud.vision.enums` and :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :start-after: [START howto_operator_vision_enums_import] :end-before: [END howto_operator_vision_enums_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_annotate_image] @@ -126,7 +126,7 @@ The result can be extracted from XCOM: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_annotate_image_result] @@ -136,7 +136,7 @@ The result can be extracted from XCOM: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. 
literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_annotate_image_template_fields] @@ -169,24 +169,24 @@ Using the operator We are using the ``Product`` and :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_product_import] :end-before: [END howto_operator_vision_product_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_product] :end-before: [END howto_operator_vision_product] The ``product_id`` argument can be omitted (it will be generated by the API): -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_create] @@ -194,7 +194,7 @@ The ``product_id`` argument can be omitted (it will be generated by the API): Or it can be specified explicitly: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_create_2] @@ -204,7 +204,7 @@ Or it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_product_create_template_fields] @@ -239,7 +239,7 @@ Using the operator If ``product_id`` was generated by the API it can be extracted from XCOM: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_delete] @@ -247,7 +247,7 @@ If ``product_id`` was generated by the API it can be extracted from XCOM: Otherwise it can be specified explicitly: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_delete_2] @@ -256,7 +256,7 @@ Otherwise it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. 
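For the product-creation step shown above, a minimal sketch follows; the display name, product category, and location are assumptions chosen for illustration.

.. code-block:: python

    from google.cloud.vision_v1 import Product

    from airflow.providers.google.cloud.operators.vision import CloudVisionCreateProductOperator

    create_product = CloudVisionCreateProductOperator(
        task_id="create_product",
        location="europe-west1",
        product=Product(display_name="my-product", product_category="toys"),
        # product_id is omitted, so the API generates one; the operator returns
        # the id, which lands in XCom for downstream tasks.
    )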
literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_product_delete_template_fields] @@ -287,7 +287,7 @@ Using the operator If ``product_id`` was generated by the API it can be extracted from XCOM: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_get] @@ -295,7 +295,7 @@ If ``product_id`` was generated by the API it can be extracted from XCOM: Otherwise it can be specified explicitly: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_get_2] @@ -304,7 +304,7 @@ Otherwise it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_product_get_template_fields] @@ -331,24 +331,24 @@ Using the operator We are using the ``ProductSet`` and :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_product_set_import] :end-before: [END howto_operator_vision_product_set_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_product_set] :end-before: [END howto_operator_vision_product_set] The ``product_set_id`` argument can be omitted (it will be generated by the API): -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_set_create] @@ -356,7 +356,7 @@ The ``product_set_id`` argument can be omitted (it will be generated by the API) Or it can be specified explicitly: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_set_create_2] @@ -366,7 +366,7 @@ Or it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. 
literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_productset_create_template_fields] @@ -395,7 +395,7 @@ Using the operator If ``product_set_id`` was generated by the API it can be extracted from XCOM: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_set_delete] @@ -403,7 +403,7 @@ If ``product_set_id`` was generated by the API it can be extracted from XCOM: Otherwise it can be specified explicitly: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_set_delete_2] @@ -412,7 +412,7 @@ Otherwise it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_productset_delete_template_fields] @@ -439,7 +439,7 @@ Using the operator If ``product_set_id`` was generated by the API it can be extracted from XCOM: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_set_get] @@ -447,7 +447,7 @@ If ``product_set_id`` was generated by the API it can be extracted from XCOM: Otherwise it can be specified explicitly: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_set_get_2] @@ -456,7 +456,7 @@ Otherwise it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_productset_get_template_fields] @@ -495,12 +495,12 @@ Using the operator We are using the ``ProductSet`` object from the Google Cloud Vision library: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_product_set_import] :end-before: [END howto_operator_vision_product_set_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_product_set] :end-before: [END howto_operator_vision_product_set] @@ -509,7 +509,7 @@ Initialization of the task: If ``product_set_id`` was generated by the API it can be extracted from XCOM: -.. 
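For the product-set tasks in this section, a minimal sketch of creating a set and reading the API-generated id back from XCom follows; the display name and location are illustrative assumptions.

.. code-block:: python

    from google.cloud.vision_v1 import ProductSet

    from airflow.providers.google.cloud.operators.vision import (
        CloudVisionCreateProductSetOperator,
    )

    create_product_set = CloudVisionCreateProductSetOperator(
        task_id="create_product_set",
        location="europe-west1",
        product_set=ProductSet(display_name="my-product-set"),
    )

    # A downstream templated field can then reference the generated id, e.g.
    # "{{ task_instance.xcom_pull('create_product_set') }}".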
exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_set_update] @@ -517,7 +517,7 @@ If ``product_set_id`` was generated by the API it can be extracted from XCOM: Otherwise it can be specified explicitly: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_set_update_2] @@ -526,7 +526,7 @@ Otherwise it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_productset_update_template_fields] @@ -576,19 +576,19 @@ Using the operator We are using the ``Product`` object from the Google Cloud Vision library: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_product_import] :end-before: [END howto_operator_vision_product_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_product] :end-before: [END howto_operator_vision_product] If ``product_id`` was generated by the API it can be extracted from XCOM: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_update] @@ -596,7 +596,7 @@ If ``product_id`` was generated by the API it can be extracted from XCOM: Otherwise it can be specified explicitly: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_product_update_2] @@ -605,7 +605,7 @@ Otherwise it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_product_update_template_fields] @@ -632,24 +632,24 @@ Using the operator We are using the :class:`~google.cloud.vision_v1.types.ReferenceImage` and :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_reference_image_import] :end-before: [END howto_operator_vision_reference_image_import] -.. 
exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_reference_image] :end-before: [END howto_operator_vision_reference_image] The ``product_set_id`` argument can be omitted (it will be generated by the API): -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_reference_image_create] @@ -657,7 +657,7 @@ The ``product_set_id`` argument can be omitted (it will be generated by the API) Or it can be specified explicitly: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_reference_image_create_2] @@ -667,7 +667,7 @@ Or it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_reference_image_create_template_fields] @@ -694,24 +694,24 @@ Using the operator We are using the :class:`~google.cloud.vision_v1.types.ReferenceImage` and :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_reference_image_import] :end-before: [END howto_operator_vision_reference_image_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_reference_image] :end-before: [END howto_operator_vision_reference_image] The ``product_set_id`` argument can be omitted (it will be generated by the API): -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. 
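For the reference-image creation described above, a minimal sketch follows; the image URI is a placeholder, and the ``product_id`` is pulled from a hypothetical upstream ``create_product`` task.

.. code-block:: python

    from google.cloud.vision_v1 import ReferenceImage

    from airflow.providers.google.cloud.operators.vision import (
        CloudVisionCreateReferenceImageOperator,
    )

    create_reference_image = CloudVisionCreateReferenceImageOperator(
        task_id="create_reference_image",
        location="europe-west1",
        reference_image=ReferenceImage(uri="gs://my-bucket/my-image.jpg"),  # assumption
        product_id="{{ task_instance.xcom_pull('create_product') }}",  # assumption
    )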
exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_reference_image_delete] @@ -719,7 +719,7 @@ The ``product_set_id`` argument can be omitted (it will be generated by the API) Or it can be specified explicitly: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_reference_image_delete_2] @@ -729,7 +729,7 @@ Or it can be specified explicitly: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_reference_image_create_template_fields] @@ -758,17 +758,17 @@ We are using the :class:`~google.cloud.vision_v1.types.Product`, :class:`~google.cloud.vision_v1.types.ProductSet` and :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_product_set_import] :end-before: [END howto_operator_vision_product_set_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :start-after: [START howto_operator_vision_product_import] :end-before: [END howto_operator_vision_product_import] @@ -776,7 +776,7 @@ Google libraries: If ``product_set_id`` and ``product_id`` were generated by the API they can be extracted from XCOM: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_autogenerated.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_remove_product_from_product_set] @@ -784,7 +784,7 @@ If ``product_set_id`` and ``product_id`` were generated by the API they can be extr Otherwise it can be specified explicitly: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_explicit.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_explicit.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_remove_product_from_product_set_2] @@ -794,7 +794,7 @@ Otherwise it can be specified explicitly: Templating """""""""" -.. 
literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_remove_product_from_product_set_template_fields] @@ -824,12 +824,12 @@ Using the operator We are using the :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_detect_text] @@ -837,7 +837,7 @@ Google libraries: The result can be extracted from XCOM: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_detect_text_result] @@ -847,7 +847,7 @@ The result can be extracted from XCOM: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_detect_text_set_template_fields] @@ -876,12 +876,12 @@ Using the operator We are using the :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_document_detect_text] @@ -889,7 +889,7 @@ Google libraries: The result can be extracted from XCOM: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_document_detect_text_result] @@ -899,7 +899,7 @@ The result can be extracted from XCOM: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_document_detect_text_set_template_fields] @@ -929,12 +929,12 @@ Using the operator We are using the :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_detect_labels] @@ -942,7 +942,7 @@ Google libraries: The result can be extracted from XCOM: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_detect_labels_result] @@ -952,7 +952,7 @@ The result can be extracted from XCOM: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_detect_labels_template_fields] @@ -981,12 +981,12 @@ Using the operator We are using the :class:`~google.api_core.retry.Retry` objects from Google libraries: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :start-after: [START howto_operator_vision_retry_import] :end-before: [END howto_operator_vision_retry_import] -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_detect_safe_search] @@ -994,7 +994,7 @@ Google libraries: The result can be extracted from XCOM: -.. exampleinclude:: /../../tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/vision/example_vision_annotate_image.py :language: python :dedent: 4 :start-after: [START howto_operator_vision_detect_safe_search_result] @@ -1004,7 +1004,7 @@ The result can be extracted from XCOM: Templating """""""""" -.. literalinclude:: /../../airflow/providers/google/cloud/operators/vision.py +.. literalinclude:: /../../providers/src/airflow/providers/google/cloud/operators/vision.py :language: python :dedent: 4 :start-after: [START vision_detect_safe_search_template_fields] diff --git a/docs/apache-airflow-providers-google/operators/cloud/workflows.rst b/docs/apache-airflow-providers-google/operators/cloud/workflows.rst index 79c7a2072419b..aa391cb0189e0 100644 --- a/docs/apache-airflow-providers-google/operators/cloud/workflows.rst +++ b/docs/apache-airflow-providers-google/operators/cloud/workflows.rst @@ -39,7 +39,7 @@ Create workflow To create a workflow use :class:`~airflow.providers.google.cloud.operators.dataproc.WorkflowsCreateWorkflowOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. 
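For the image label detection shown above, a minimal sketch follows; the ``image`` payload shape mirrors the Vision API's ``Image`` message, and the GCS URI is a placeholder.

.. code-block:: python

    from airflow.providers.google.cloud.operators.vision import (
        CloudVisionDetectImageLabelsOperator,
    )

    detect_image_labels = CloudVisionDetectImageLabelsOperator(
        task_id="detect_image_labels",
        image={"source": {"image_uri": "gs://my-bucket/my-image.jpg"}},  # assumption
    )

    # The annotation response is returned by execute() and is therefore
    # available to downstream tasks through XCom.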
exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_create_workflow] @@ -47,7 +47,7 @@ To create a workflow use The workflow should be defined in a similar way to this example: -.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 0 :start-after: [START how_to_define_workflow] @@ -65,7 +65,7 @@ Update workflow To update a workflow use :class:`~airflow.providers.google.cloud.operators.dataproc.WorkflowsUpdateWorkflowOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_update_workflow] @@ -79,7 +79,7 @@ Get workflow To get a workflow use :class:`~airflow.providers.google.cloud.operators.dataproc.WorkflowsGetWorkflowOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_get_workflow] @@ -93,7 +93,7 @@ List workflows To list workflows use :class:`~airflow.providers.google.cloud.operators.dataproc.WorkflowsListWorkflowsOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_list_workflows] @@ -107,7 +107,7 @@ Delete workflow To delete a workflow use :class:`~airflow.providers.google.cloud.operators.dataproc.WorkflowsDeleteWorkflowOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_delete_workflow] @@ -122,7 +122,7 @@ To create an execution use :class:`~airflow.providers.google.cloud.operators.dataproc.WorkflowsCreateExecutionOperator`. This operator is not idempotent due to an API limitation. -.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_create_execution] @@ -131,7 +131,7 @@ This operator is not idempotent due to an API limitation. The create operator does not wait for execution to complete. To wait for the execution result use :class:`~airflow.providers.google.cloud.operators.dataproc.WorkflowExecutionSensor`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_wait_for_execution] @@ -145,7 +145,7 @@ Get execution To get an execution use :class:`~airflow.providers.google.cloud.operators.dataproc.WorkflowsGetExecutionOperator`. -.. 
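For the create-execution and wait-for-execution pair described above, a minimal sketch follows. Note that in recent provider releases these classes are importable from the ``workflows`` operator and sensor modules; the project, workflow id, execution payload, and the ``execution_id`` XCom key are assumptions to verify against your provider version.

.. code-block:: python

    from airflow.models.xcom_arg import XComArg
    from airflow.providers.google.cloud.operators.workflows import (
        WorkflowsCreateExecutionOperator,
    )
    from airflow.providers.google.cloud.sensors.workflows import WorkflowExecutionSensor

    create_execution = WorkflowsCreateExecutionOperator(
        task_id="create_execution",
        location="us-central1",
        project_id="my-gcp-project",  # assumption: replace with your project id
        workflow_id="my-workflow",  # assumption: an existing workflow
        execution={"argument": ""},  # assumption: an execution with no arguments
    )

    wait_for_execution = WorkflowExecutionSensor(
        task_id="wait_for_execution",
        location="us-central1",
        project_id="my-gcp-project",
        workflow_id="my-workflow",
        # assumption: the create operator pushes the id under this XCom key
        execution_id=XComArg(create_execution, key="execution_id"),
    )

    create_execution >> wait_for_execution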
exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_get_execution] @@ -160,7 +160,7 @@ To list executions use :class:`~airflow.providers.google.cloud.operators.dataproc.WorkflowsListExecutionsOperator`. By default this operator will return only executions for the last 60 minutes. -.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_list_executions] @@ -174,7 +174,7 @@ Cancel execution To cancel an execution use :class:`~airflow.providers.google.cloud.operators.dataproc.WorkflowsCancelExecutionOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/workflows/example_workflows.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/workflows/example_workflows.py :language: python :dedent: 4 :start-after: [START how_to_cancel_execution] diff --git a/docs/apache-airflow-providers-google/operators/firebase/firestore.rst b/docs/apache-airflow-providers-google/operators/firebase/firestore.rst index ba7469d9744d0..eb274491d2fd2 100644 --- a/docs/apache-airflow-providers-google/operators/firebase/firestore.rst +++ b/docs/apache-airflow-providers-google/operators/firebase/firestore.rst @@ -41,7 +41,7 @@ Export database Exporting a copy of all or a subset of documents from Google Cloud Firestore to Google Cloud Storage is performed with the :class:`~airflow.providers.google.firebase.operators.firestore.CloudFirestoreExportDatabaseOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_firestore.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_firestore.py :language: python :dedent: 4 :start-after: [START howto_operator_export_database_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/leveldb/leveldb.rst b/docs/apache-airflow-providers-google/operators/leveldb/leveldb.rst index 2a4f8b41b3167..9aeb5fcae80bb 100644 --- a/docs/apache-airflow-providers-google/operators/leveldb/leveldb.rst +++ b/docs/apache-airflow-providers-google/operators/leveldb/leveldb.rst @@ -36,7 +36,7 @@ Put key Getting, putting, or deleting a key, running a ``write_batch``, and creating a database with a comparator or other options in LevelDB are performed with the :class:`~airflow.providers.google.leveldb.operators.leveldb.LevelDBOperator` operator. -.. exampleinclude:: /../../tests/system/providers/google/leveldb/example_leveldb.py +.. exampleinclude:: /../../providers/tests/system/google/leveldb/example_leveldb.py :language: python :dedent: 4 :start-after: [START howto_operator_leveldb_put_key] diff --git a/docs/apache-airflow-providers-google/operators/marketing_platform/analytics_admin.rst b/docs/apache-airflow-providers-google/operators/marketing_platform/analytics_admin.rst index b4aab19897659..fd1fc199b497b 100644 --- a/docs/apache-airflow-providers-google/operators/marketing_platform/analytics_admin.rst +++ b/docs/apache-airflow-providers-google/operators/marketing_platform/analytics_admin.rst @@ -35,7 +35,7 @@ List the Accounts To list accounts from Analytics you can use the :class:`~airflow.providers.google.marketing_platform.operators.analytics_admin.GoogleAnalyticsAdminListAccountsOperator`. -.. 
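For the Firestore export described above, a minimal sketch follows; the project id and the ``body`` contents are assumptions (``outputUriPrefix`` follows the Firestore REST API's export request shape).

.. code-block:: python

    from airflow.providers.google.firebase.operators.firestore import (
        CloudFirestoreExportDatabaseOperator,
    )

    export_database_to_gcs = CloudFirestoreExportDatabaseOperator(
        task_id="export_database_to_gcs",
        project_id="my-gcp-project",  # assumption: replace with your project id
        body={"outputUriPrefix": "gs://my-bucket/firestore-export"},  # assumption
    )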
exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_analytics_admin.py :language: python :dedent: 4 :start-after: [START howto_marketing_platform_list_accounts_operator] @@ -53,7 +53,7 @@ Creates a property. To create a property you can use the :class:`~airflow.providers.google.marketing_platform.operators.analytics_admin.GoogleAnalyticsAdminCreatePropertyOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_analytics_admin.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_analytics_admin.py :language: python :dedent: 4 :start-after: [START howto_marketing_platform_create_property_operator] @@ -71,7 +71,7 @@ Deletes a property. To delete a property you can use the :class:`~airflow.providers.google.marketing_platform.operators.analytics_admin.GoogleAnalyticsAdminDeletePropertyOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_analytics_admin.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_analytics_admin.py :language: python :dedent: 4 :start-after: [START howto_marketing_platform_delete_property_operator] @@ -89,7 +89,7 @@ Creates a data stream. To create a data stream you can use the :class:`~airflow.providers.google.marketing_platform.operators.analytics_admin.GoogleAnalyticsAdminCreateDataStreamOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_analytics_admin.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_analytics_admin.py :language: python :dedent: 4 :start-after: [START howto_marketing_platform_create_data_stream_operator] @@ -107,7 +107,7 @@ Deletes a data stream. To delete a data stream you can use the :class:`~airflow.providers.google.marketing_platform.operators.analytics_admin.GoogleAnalyticsAdminDeleteDataStreamOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_analytics_admin.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_analytics_admin.py :language: python :dedent: 4 :start-after: [START howto_marketing_platform_delete_data_stream_operator] @@ -124,7 +124,7 @@ List Google Ads Links To list Google Ads links you can use the :class:`~airflow.providers.google.marketing_platform.operators.analytics_admin.GoogleAnalyticsAdminListGoogleAdsLinksOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_analytics_admin.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_analytics_admin.py :language: python :dedent: 4 :start-after: [START howto_marketing_platform_list_google_ads_links] @@ -141,7 +141,7 @@ Get the Google Ads link To get a Google Ads link you can use the :class:`~airflow.providers.google.marketing_platform.operators.analytics_admin.GoogleAnalyticsAdminGetGoogleAdsLinkOperator`. -.. 
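For the create-property operator above, a rough sketch follows; treat the ``analytics_property`` argument name and the payload fields as assumptions drawn from the Analytics Admin API's ``Property`` resource, not confirmed signatures.

.. code-block:: python

    from airflow.providers.google.marketing_platform.operators.analytics_admin import (
        GoogleAnalyticsAdminCreatePropertyOperator,
    )

    create_property = GoogleAnalyticsAdminCreatePropertyOperator(
        task_id="create_property",
        analytics_property={  # assumption: argument name and minimal payload
            "parent": "accounts/123456789",  # assumption: your account resource name
            "display_name": "Test property",
            "time_zone": "America/Los_Angeles",
        },
    )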
exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_analytics_admin.py :language: python :dedent: 4 :start-after: [START howto_marketing_platform_get_google_ad_link] diff --git a/docs/apache-airflow-providers-google/operators/marketing_platform/campaign_manager.rst b/docs/apache-airflow-providers-google/operators/marketing_platform/campaign_manager.rst index eb3b6b03549e5..15b08b72a57b4 100644 --- a/docs/apache-airflow-providers-google/operators/marketing_platform/campaign_manager.rst +++ b/docs/apache-airflow-providers-google/operators/marketing_platform/campaign_manager.rst @@ -36,7 +36,7 @@ To delete a Campaign Manager report you can use the :class:`~airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerDeleteReportOperator`. It deletes a report by its unique ID. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_campaign_manager.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_delete_report_operator] @@ -54,7 +54,7 @@ Downloading a report The :class:`~airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerDownloadReportOperator` allows you to download a Campaign Manager report to a Google Cloud Storage bucket. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_campaign_manager.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_get_report_operator] @@ -72,7 +72,7 @@ Waiting for a report Reports are generated asynchronously. To wait for a report to be ready for downloading you can use :class:`~airflow.providers.google.marketing_platform.sensors.campaign_manager.GoogleCampaignManagerReportSensor`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_campaign_manager.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_wait_for_operation] @@ -91,7 +91,7 @@ To insert a Campaign Manager report you can use the :class:`~airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerInsertReportOperator`. Running this operator creates a new report. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_campaign_manager.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_insert_report_operator] @@ -111,7 +111,7 @@ Running a report To run a Campaign Manager report you can use the :class:`~airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerRunReportOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_campaign_manager.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_run_report_operator] @@ -130,7 +130,7 @@ Inserting conversions To insert Campaign Manager conversions you can use the :class:`~airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerBatchInsertConversionsOperator`. -.. 
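For the insert/run/wait report lifecycle described above, a rough sketch follows; the profile id, report body, and the ``report_id``/``file_id`` XCom keys are assumptions to verify against your provider release.

.. code-block:: python

    from airflow.providers.google.marketing_platform.operators.campaign_manager import (
        GoogleCampaignManagerInsertReportOperator,
        GoogleCampaignManagerRunReportOperator,
    )
    from airflow.providers.google.marketing_platform.sensors.campaign_manager import (
        GoogleCampaignManagerReportSensor,
    )

    insert_report = GoogleCampaignManagerInsertReportOperator(
        task_id="insert_report",
        profile_id="1234567",  # assumption: your Campaign Manager profile id
        report={"name": "Airflow test report", "type": "STANDARD"},  # assumption: minimal body
    )

    run_report = GoogleCampaignManagerRunReportOperator(
        task_id="run_report",
        profile_id="1234567",
        report_id="{{ task_instance.xcom_pull('insert_report', key='report_id') }}",
    )

    wait_for_report = GoogleCampaignManagerReportSensor(
        task_id="wait_for_report",
        profile_id="1234567",
        report_id="{{ task_instance.xcom_pull('insert_report', key='report_id') }}",
        file_id="{{ task_instance.xcom_pull('run_report', key='file_id') }}",
    )

    insert_report >> run_report >> wait_for_report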
exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_campaign_manager.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_insert_conversions] @@ -149,7 +149,7 @@ Updating conversions To update Campaign Manager conversions you can use the :class:`~airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerBatchUpdateConversionsOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_campaign_manager.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_campaign_manager.py :language: python :dedent: 4 :start-after: [START howto_campaign_manager_update_conversions] diff --git a/docs/apache-airflow-providers-google/operators/marketing_platform/display_video.rst b/docs/apache-airflow-providers-google/operators/marketing_platform/display_video.rst index 1669b26cc1ced..4b1f4b81cac8e 100644 --- a/docs/apache-airflow-providers-google/operators/marketing_platform/display_video.rst +++ b/docs/apache-airflow-providers-google/operators/marketing_platform/display_video.rst @@ -33,7 +33,7 @@ Creating a Query To create a Display&Video 360 query use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360CreateQueryOperator`. -.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_create_query_operator] @@ -52,7 +52,7 @@ Run Query :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360RunQueryOperator`. -.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_run_query_report_operator] @@ -71,7 +71,7 @@ Deleting a report To delete a Display&Video 360 report use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360DeleteReportOperator`. -.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_delete_query_report_operator] @@ -89,7 +89,7 @@ Waiting for query To wait for the report use :class:`~airflow.providers.google.marketing_platform.sensors.display_video.GoogleDisplayVideo360RunQuerySensor`. -.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_wait_run_query_sensor] @@ -107,7 +107,7 @@ Downloading a report To download a report to a GCS bucket use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360DownloadReportV2Operator`. -.. 
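For the create-query and run-query steps above, a rough sketch follows; the ``body`` and ``parameters`` payloads and the ``query_id`` XCom key are assumptions, not values confirmed from the example DAG.

.. code-block:: python

    from airflow.providers.google.marketing_platform.operators.display_video import (
        GoogleDisplayVideo360CreateQueryOperator,
        GoogleDisplayVideo360RunQueryOperator,
    )

    create_query = GoogleDisplayVideo360CreateQueryOperator(
        task_id="create_query",
        body={  # assumption: a minimal standard-report request body
            "metadata": {"title": "Airflow test report", "format": "CSV"},
            "params": {"type": "STANDARD"},
            "schedule": {"frequency": "ONE_TIME"},
        },
    )

    run_query = GoogleDisplayVideo360RunQueryOperator(
        task_id="run_query",
        # assumption: the create operator pushes the id under this XCom key
        query_id="{{ task_instance.xcom_pull('create_query', key='query_id') }}",
        parameters={"dataRange": {"range": "LAST_7_DAYS"}},  # assumption
    )

    create_query >> run_query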
exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_get_report_operator] @@ -135,7 +135,7 @@ The operator accepts a body request: To download line items in CSV format use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360DownloadLineItemsOperator`. -.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_download_line_items_operator] @@ -154,7 +154,7 @@ Upload line items To upload Display&Video 360 line items use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360UploadLineItemsOperator`. -.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_upload_line_items_operator] @@ -172,7 +172,7 @@ Create SDF download task To create an SDF download task use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360CreateSDFDownloadTaskOperator`. -.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_create_sdf_download_task_operator] @@ -191,7 +191,7 @@ Save SDF files in the Google Cloud Storage To save SDF files in Google Cloud Storage use :class:`~airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360SDFtoGCSOperator`. -.. exampleinclude:: /../../airflow/providers/google/marketing_platform/example_dags/example_display_video.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_save_sdf_in_gcs_operator] @@ -209,7 +209,7 @@ Waiting for SDF operation Waiting for an SDF operation is performed by :class:`~airflow.providers.google.marketing_platform.sensors.display_video.GoogleDisplayVideo360GetSDFDownloadOperationSensor`. -.. 
exampleinclude:: /../../providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py :language: python :dedent: 4 :start-after: [START howto_google_display_video_wait_for_operation_sensor] diff --git a/docs/apache-airflow-providers-google/operators/marketing_platform/search_ads.rst b/docs/apache-airflow-providers-google/operators/marketing_platform/search_ads.rst index 928d60f671a54..00fb9d5ab7074 100644 --- a/docs/apache-airflow-providers-google/operators/marketing_platform/search_ads.rst +++ b/docs/apache-airflow-providers-google/operators/marketing_platform/search_ads.rst @@ -34,7 +34,7 @@ Querying a report To query a Search Ads report use the :class:`~airflow.providers.google.marketing_platform.operators.search_ads.GoogleSearchAdsSearchOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_search_ads.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_search_ads.py :language: python :dedent: 4 :start-after: [START howto_search_ads_search_query_reports] @@ -52,7 +52,7 @@ Retrieve a field metadata To retrieve metadata of a field use :class:`~airflow.providers.google.marketing_platform.operators.search_ads.GoogleSearchAdsGetFieldOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_search_ads.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_search_ads.py :language: python :dedent: 4 :start-after: [START howto_search_ads_get_field] @@ -70,7 +70,7 @@ Retrieve metadata for multiple fields To retrieve metadata of multiple fields use the :class:`~airflow.providers.google.marketing_platform.operators.search_ads.GoogleSearchAdsSearchFieldsOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_search_ads.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_search_ads.py :language: python :dedent: 4 :start-after: [START howto_search_ads_search_fields] @@ -89,7 +89,7 @@ Retrieve a custom column details To retrieve details of a custom column use :class:`~airflow.providers.google.marketing_platform.operators.search_ads.GoogleSearchAdsGetCustomColumnOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_search_ads.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_search_ads.py :language: python :dedent: 4 :start-after: [START howto_search_ads_get_custom_column] @@ -108,7 +108,7 @@ To retrieve the list of all custom columns use :class:`~airflow.providers.google.marketing_platform.operators.search_ads.GoogleSearchAdsListCustomColumnsOperator`. -.. exampleinclude:: /../../tests/system/providers/google/marketing_platform/example_search_ads.py +.. exampleinclude:: /../../providers/tests/system/google/marketing_platform/example_search_ads.py :language: python :dedent: 4 :start-after: [START howto_search_ads_list_custom_columns] diff --git a/docs/apache-airflow-providers-google/operators/suite/sheets.rst b/docs/apache-airflow-providers-google/operators/suite/sheets.rst index 0f64c2f0b1dd3..0f891a60a3cfe 100644 --- a/docs/apache-airflow-providers-google/operators/suite/sheets.rst +++ b/docs/apache-airflow-providers-google/operators/suite/sheets.rst @@ -44,7 +44,7 @@ Create spreadsheet To create a new spreadsheet you can use the :class:`~airflow.providers.google.suite.operators.sheets.GoogleSheetsCreateSpreadsheetOperator`. -.. 
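For the create-spreadsheet operator above, a minimal sketch follows; the ``spreadsheet`` payload mirrors the Sheets API's minimal request body and the title is an assumption.

.. code-block:: python

    from airflow.providers.google.suite.operators.sheets import (
        GoogleSheetsCreateSpreadsheetOperator,
    )

    create_spreadsheet = GoogleSheetsCreateSpreadsheetOperator(
        task_id="create_spreadsheet",
        spreadsheet={"properties": {"title": "Test spreadsheet"}},  # assumption: minimal body
    )

    # assumption: the operator pushes "spreadsheet_id" and "spreadsheet_url"
    # XCom entries, which downstream templated fields can pull.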
exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_sheets.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_sheets.py :language: python :dedent: 4 :start-after: [START create_spreadsheet] @@ -55,7 +55,7 @@ You can use :ref:`Jinja templating ` with To get the URL of newly created spreadsheet use XCom value: -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_sheets.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_sheets.py :language: python :dedent: 4 :start-after: [START print_spreadsheet_url] diff --git a/docs/apache-airflow-providers-google/operators/transfer/azure_blob_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/azure_blob_to_gcs.rst index 1a7fb4c3062b8..8bd909a809c27 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/azure_blob_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/azure_blob_to_gcs.rst @@ -38,7 +38,7 @@ to transfer data from Azure Blob Storage to Google Cloud Storage. Example usage: -.. exampleinclude:: /../../tests/system/providers/google/cloud/azure/example_azure_blob_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/azure/example_azure_blob_to_gcs.py :language: python :start-after: [START how_to_azure_blob_to_gcs] :end-before: [END how_to_azure_blob_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/azure_fileshare_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/azure_fileshare_to_gcs.rst index e962e2b88520b..6efc0d091cc48 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/azure_fileshare_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/azure_fileshare_to_gcs.rst @@ -36,7 +36,7 @@ All parameters are described in the reference documentation - :class:`~airflow.p An example operator call might look like this: -.. exampleinclude:: /../../tests/system/providers/google/cloud/azure/example_azure_fileshare_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/azure/example_azure_fileshare_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_azure_fileshare_to_gcs_basic] diff --git a/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_bigquery.rst b/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_bigquery.rst index 23fe393994ebc..f9ce21df0dd00 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_bigquery.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_bigquery.rst @@ -50,7 +50,7 @@ Copying BigQuery tables The following Operator copies data from one or more BigQuery tables to another. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_to_bigquery.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_to_bigquery.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_to_bigquery] diff --git a/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_gcs.rst index b0eeb0ac5e346..2e2f3ad41a28d 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_gcs.rst @@ -53,7 +53,7 @@ Exporting tables The following Operator exports BigQuery table into a GCS. -.. 
exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_mssql.rst b/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_mssql.rst index 5fb56ae8701db..382f6fa645886 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_mssql.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_mssql.rst @@ -53,7 +53,7 @@ Transferring data The following Operator copies data from a BigQuery table to MsSQL. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_to_mssql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_to_mssql.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_to_mssql] diff --git a/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_mysql.rst b/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_mysql.rst index 263db92d053ad..0790cc65096f2 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_mysql.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_mysql.rst @@ -51,7 +51,7 @@ Transferring data The following Operator copies data from a BigQuery table to MySQL. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_to_mysql.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_to_mysql.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_to_mysql] diff --git a/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_postgres.rst b/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_postgres.rst index 70f0c8c56d05a..d600017a05fb5 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_postgres.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/bigquery_to_postgres.rst @@ -55,7 +55,7 @@ Transferring data The following Operator copies data from a BigQuery table to PostgreSQL. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_to_postgres.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_to_postgres.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_to_postgres] @@ -63,7 +63,7 @@ The following Operator copies data from a BigQuery table to PostgreSQL. The Operator can also replace data in a PostgreSQL table with matching data from a BigQuery table. -.. exampleinclude:: /../../tests/system/providers/google/cloud/bigquery/example_bigquery_to_postgres.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/bigquery/example_bigquery_to_postgres.py :language: python :dedent: 4 :start-after: [START howto_operator_bigquery_to_postgres_upsert] diff --git a/docs/apache-airflow-providers-google/operators/transfer/calendar_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/calendar_to_gcs.rst index 7b420f1e6a046..37569d6c895bc 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/calendar_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/calendar_to_gcs.rst @@ -37,7 +37,7 @@ Upload data from Google Calendar to GCS To upload data from Google Calendar to Google Cloud Storage you can use the :class:`~airflow.providers.google.cloud.transfers.calendar_to_gcs.GoogleCalendarToGCSOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_calendar_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_calendar_to_gcs.py :language: python :dedent: 4 :start-after: [START upload_calendar_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/facebook_ads_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/facebook_ads_to_gcs.rst index 7132417cbb565..003239d676f0a 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/facebook_ads_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/facebook_ads_to_gcs.rst @@ -34,7 +34,7 @@ Use the :class:`~airflow.providers.google.cloud.transfers.facebook_ads_to_gcs.FacebookAdsReportToGcsOperator` to execute a Facebook ads report fetch and load to GCS. -.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py :language: python :start-after: [START howto_operator_facebook_ads_to_gcs] :end-before: [END howto_operator_facebook_ads_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_bigquery.rst b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_bigquery.rst index a10b3d046c36d..a05ed77484a36 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_bigquery.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_bigquery.rst @@ -53,7 +53,7 @@ Transferring files The following Operator transfers one or more files from GCS into a BigQuery table. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_bigquery] @@ -61,7 +61,7 @@ The following Operator transfers one or more files from GCS into a BigQuery tabl Also you can use GCSToBigQueryOperator in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery_async.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery_async.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_bigquery_async] diff --git a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gcs.rst index 6e49dc4229e26..3bb97af557d48 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gcs.rst @@ -105,7 +105,7 @@ Note that if the flag ``exact_match=False`` then the ``source_object`` will be c in the ``BUCKET_1_SRC`` GCS bucket. That's why if any are found, they will be copied as well. To prevent this from happening, please use ``exact_match=True``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_single_file] @@ -121,7 +121,7 @@ are both deprecated. Thus, it is not recommended to use them - but to utilize `` The following example would copy the files that match the glob pattern in ``data/`` folder from ``BUCKET_1_SRC`` GCS bucket to the ``backup/`` folder in ``BUCKET_1_DST`` bucket. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_match_glob] @@ -130,7 +130,7 @@ The following example would copy all the files in ``subdir/`` folder (i.e subdir/a.csv, subdir/b.csv, subdir/c.csv) from the ``BUCKET_1_SRC`` GCS bucket to the ``backup/`` folder in ``BUCKET_1_DST`` bucket. (i.e backup/a.csv, backup/b.csv, backup/c.csv) -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_without_wildcard] @@ -138,7 +138,7 @@ the ``BUCKET_1_SRC`` GCS bucket to the ``backup/`` folder in ``BUCKET_1_DST`` bu -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_list] @@ -156,7 +156,7 @@ Note that if the flag ``exact_match=False`` then the ``source_object`` will be c in the ``BUCKET_1_SRC`` GCS bucket. That's why if any are found, they will be copied as well. To prevent this from happening, please use ``exact_match=True``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_single_file_move] @@ -168,7 +168,7 @@ Move multiple files Multiple files may be moved by supplying ``True`` to the ``move_object`` argument. The same rules concerning wildcards and the ``delimiter`` argument apply to moves as well as copies.
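A minimal sketch of such a move, assuming the ``GCSToGCSOperator`` import path used by the example files referenced on this page; the bucket names are the placeholder ``BUCKET_1_SRC``/``BUCKET_1_DST`` already used above, and the prefix is illustrative:

.. code-block:: python

    from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator

    # Move (copy, then delete the source) every object whose name starts with
    # "data/"; the wildcard and delimiter rules described above apply to moves too.
    move_files = GCSToGCSOperator(
        task_id="move_files",
        source_bucket="BUCKET_1_SRC",
        source_object="data/",
        destination_bucket="BUCKET_1_DST",
        destination_object="backup/",
        move_object=True,
    )

-..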
exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gcs_list_move] @@ -201,7 +201,7 @@ The following example will ensure all files in ``BUCKET_1_SRC``, including any i ``BUCKET_1_DST``. It will not overwrite identically named files in ``BUCKET_1_DST`` if they already exist. It will not delete any files in ``BUCKET_1_DST`` not in ``BUCKET_1_SRC``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_synch_bucket] @@ -214,7 +214,7 @@ This example will ensure all files in ``BUCKET_1_SRC``, including any in subdire ``BUCKET_1_DST``. It will overwrite identically named files in ``BUCKET_1_DST`` if they already exist. It will delete any files in ``BUCKET_1_DST`` not in ``BUCKET_1_SRC``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_synch_full_bucket] @@ -227,7 +227,7 @@ The following example will ensure all files in ``BUCKET_1_SRC``, including any i ``subdir`` folder in ``BUCKET_1_DST``. It will not overwrite identically named files in ``BUCKET_1_DST/subdir`` if they already exist and it will not delete any files in ``BUCKET_1_DST/subdir`` not in ``BUCKET_1_SRC``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_synch_to_subdir] @@ -240,7 +240,7 @@ This example will ensure all files in ``BUCKET_1_SRC/subdir``, including any in in ``BUCKET_1_DST``. It will not overwrite identically named files in ``BUCKET_1_DST`` if they already exist and it will not delete any files in ``BUCKET_1_DST`` not in ``BUCKET_1_SRC/subdir``. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_sync_from_subdir] diff --git a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gdrive.rst b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gdrive.rst index 8a8edd519af52..6b7467f96d1a7 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gdrive.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_gdrive.rst @@ -47,7 +47,7 @@ Copy single files The following Operator would copy a single file. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gdrive.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gdrive_copy_single_file] @@ -58,7 +58,7 @@ Copy into an existing folder The following Operator would copy a single file into an existing folder with the specified ID. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gdrive.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gdrive_copy_single_file_into_folder] @@ -69,7 +69,7 @@ Copy multiple files The following Operator would copy multiple files (i.e. using a wildcard). -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gdrive.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gdrive_copy_files] @@ -81,7 +81,7 @@ Move files Using the ``move_object`` parameter allows you to move the files. After copying the file to Google Drive, the original file from the bucket is deleted. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_gdrive.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_gdrive_move_files] diff --git a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_local.rst b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_local.rst index 9dfebb1608f7b..87a29e54c4984 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_local.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_local.rst @@ -37,7 +37,7 @@ data from GCS to local filesystem. Below is an example of using this operator to download a file from GCS. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_upload_download.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_upload_download.py :language: python :dedent: 0 :start-after: [START howto_operator_gcs_download_file_task] diff --git a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_sftp.rst b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_sftp.rst index a98f537e2bc1d..f1f9ea731e881 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_sftp.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_sftp.rst @@ -48,7 +48,7 @@ Copying a single file The following Operator copies a single file. -.. exampleinclude:: /../../tests/system/providers/google/cloud/transfers/example_gcs_to_sftp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/transfers/example_gcs_to_sftp.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_sftp_copy_single_file] @@ -61,7 +61,7 @@ To move the file use the ``move_object`` parameter. Once the file is copied to S the original file from Google Storage is deleted. The ``destination_path`` parameter defines the full path of the file on the SFTP server. -.. exampleinclude:: /../../tests/system/providers/google/cloud/transfers/example_gcs_to_sftp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/transfers/example_gcs_to_sftp.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_sftp_move_single_file_destination] @@ -73,7 +73,7 @@ Copying a directory Use the ``wildcard`` in the ``source_path`` parameter to copy a directory.
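A rough sketch of such a directory copy; the connection id, bucket, and paths below are illustrative assumptions, and the parameter names follow the operator signature as published in the Google provider:

.. code-block:: python

    from airflow.providers.google.cloud.transfers.gcs_to_sftp import GCSToSFTPOperator

    # Copy everything under the "sales/january/" prefix to the SFTP server.
    copy_directory_to_sftp = GCSToSFTPOperator(
        task_id="copy_directory_to_sftp",
        sftp_conn_id="ssh_default",  # assumed connection id
        source_bucket="my-source-bucket",  # placeholder
        source_object="sales/january/*",  # trailing wildcard selects the whole "directory"
        destination_path="/tmp/sftp_dir",  # target directory on the SFTP server
        move_object=False,  # set to True to delete the source objects after copying
    )

-..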
exampleinclude:: /../../providers/tests/system/google/cloud/transfers/example_gcs_to_sftp.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_sftp_copy_directory] @@ -85,7 +85,7 @@ Moving specific files Use the ``wildcard`` in ``source_path`` parameter to move the specific files. The ``destination_path`` defines the path that is prefixed to all copied files. -.. exampleinclude:: /../../tests/system/providers/google/cloud/transfers/example_gcs_to_sftp.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/transfers/example_gcs_to_sftp.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_sftp_move_specific_files] diff --git a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_sheets.rst b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_sheets.rst index d8fdcfff3e6bc..bd2e47fd6b26e 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/gcs_to_sheets.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gcs_to_sheets.rst @@ -38,7 +38,7 @@ Upload data from GCS to Google Sheets To upload data from Google Cloud Storage to Google Spreadsheet you can use the :class:`~airflow.providers.google.suite.transfers.gcs_to_sheets.GCSToGoogleSheetsOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_to_sheets.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_to_sheets.py :language: python :dedent: 4 :start-after: [START upload_gcs_to_sheets] diff --git a/docs/apache-airflow-providers-google/operators/transfer/gdrive_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/gdrive_to_gcs.rst index c5f3a11808c87..260ec631011fd 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/gdrive_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gdrive_to_gcs.rst @@ -47,7 +47,7 @@ The following Operator copies a single file from a shared Google Drive folder to Note that you can transfer a file from the root folder of a shared drive by passing the id of the shared drive to both the ``folder_id`` and ``drive_id`` parameters. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gdrive_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gdrive_to_gcs.py :language: python :dedent: 4 :start-after: [START upload_gdrive_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/gdrive_to_local.rst b/docs/apache-airflow-providers-google/operators/transfer/gdrive_to_local.rst index 9cc69760a1ef2..57b589199d2cc 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/gdrive_to_local.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/gdrive_to_local.rst @@ -38,7 +38,7 @@ data from Google Drive to local filesystem. Below is an example of using this operator to download file from Google Drive to Local Filesystem. -.. exampleinclude:: /../../tests/system/providers/google/cloud/transfers/example_gdrive_to_local.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/transfers/example_gdrive_to_local.py :language: python :dedent: 4 :start-after: [START download_from_gdrive_to_local] diff --git a/docs/apache-airflow-providers-google/operators/transfer/local_to_drive.rst b/docs/apache-airflow-providers-google/operators/transfer/local_to_drive.rst index da2fefcacfe46..117a65dd1cc20 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/local_to_drive.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/local_to_drive.rst @@ -38,7 +38,7 @@ When you use this operator, you can upload a list of files. Below is an example of using this operator to upload data from local filesystem to Google Drive. -.. exampleinclude:: /../../tests/system/providers/google/suite/example_local_to_drive.py +.. exampleinclude:: /../../providers/tests/system/google/suite/example_local_to_drive.py :language: python :dedent: 0 :start-after: [START howto_operator_local_to_drive_upload_single_file] diff --git a/docs/apache-airflow-providers-google/operators/transfer/local_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/local_to_gcs.rst index e12726712beb1..c7b215901c5ce 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/local_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/local_to_gcs.rst @@ -38,7 +38,7 @@ When you use this operator, you can optionally compress the data being uploaded. Below is an example of using this operator to upload a file to GCS. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_gcs_upload_download.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_gcs_upload_download.py :language: python :dedent: 0 :start-after: [START howto_operator_local_filesystem_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/mssql_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/mssql_to_gcs.rst index 54ceb15ac1ea4..094973306a3ef 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/mssql_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/mssql_to_gcs.rst @@ -36,7 +36,7 @@ data from Microsoft SQL Server database to GCS. Below is an example of using this operator to upload data to GCS. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_mssql_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_mssql_to_gcs.py :language: python :start-after: [START howto_operator_mssql_to_gcs] :end-before: [END howto_operator_mssql_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/mysql_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/mysql_to_gcs.rst index 14bca637c9c5d..391f9ae837222 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/mysql_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/mysql_to_gcs.rst @@ -38,7 +38,7 @@ When you use this operator, you can optionally compress the data being uploaded Below is an example of using this operator to upload data to GCS. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_mysql_to_gcs.py :language: python :dedent: 0 :start-after: [START howto_operator_mysql_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/oracle_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/oracle_to_gcs.rst index 9760e594beef2..538cbce38d65c 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/oracle_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/oracle_to_gcs.rst @@ -38,7 +38,7 @@ When you use this operator, you can optionally compress the data being uploaded Below is an example of using this operator to upload data to GCS. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_oracle_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_oracle_to_gcs.py :language: python :dedent: 0 :start-after: [START howto_operator_oracle_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/postgres_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/postgres_to_gcs.rst index b6b01ce864a80..2a5c3fac6dda5 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/postgres_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/postgres_to_gcs.rst @@ -38,7 +38,7 @@ When you use this operator, you can optionally compress the data being uploaded Below is an example of using this operator to upload data to GCS. -.. exampleinclude:: /../../tests/system/providers/google/cloud/transfers/example_postgres_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/transfers/example_postgres_to_gcs.py :language: python :dedent: 0 :start-after: [START howto_operator_postgres_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/presto_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/presto_to_gcs.rst index c462ee9abaa6c..538f2dec094d0 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/presto_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/presto_to_gcs.rst @@ -49,7 +49,7 @@ All parameters are described in the reference documentation - :class:`~airflow.p An example operator call might look like this: -.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_presto_to_gcs_basic] @@ -67,7 +67,7 @@ You can specify these options by the ``export_format`` parameter. If you want a CSV file to be created, your operator call might look like this: -.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_presto_to_gcs_csv] @@ -81,7 +81,7 @@ will be dumped from the database and upload to the bucket. If you want to create a schema file, then an example operator call might look like this: -.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py +.. 
exampleinclude:: /../../providers/src/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_presto_to_gcs_multiple_types] @@ -102,7 +102,7 @@ maximum allowed file size for a single object. If you want to create 10 MB files, your code might look like this: -.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_read_data_from_gcs_many_chunks] @@ -123,7 +123,7 @@ For example, if you want to create an external table that allows you to create q read data directly from GCS, then you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryCreateExternalTableOperator`. Using this operator looks like this: -.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_create_external_table_multiple_types] diff --git a/docs/apache-airflow-providers-google/operators/transfer/s3_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/s3_to_gcs.rst index dbd2f603bb9ed..9dff4cbb6ab9b 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/s3_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/s3_to_gcs.rst @@ -32,7 +32,7 @@ Prerequisite Tasks Use the :class:`~airflow.providers.google.cloud.transfers.s3_to_gcs.S3ToGCSOperator` to transfer data from Amazon S3 to Google Cloud Storage. -.. exampleinclude::/../tests/system/providers/google/cloud/gcs/example_s3_to_gcs.py +.. exampleinclude::/../providers/tests/system/google/cloud/gcs/example_s3_to_gcs.py :language: python :start-after: [START howto_transfer_s3togcs_operator] :end-before: [END howto_transfer_s3togcs_operator] @@ -42,7 +42,7 @@ There is a possibility to start S3ToGCSOperator asynchronously using deferrable Transfer Service. By changing parameter ``poll_interval=10`` you can control frequency of polling a transfer job status. -.. exampleinclude::/../tests/system/providers/google/cloud/gcs/example_s3_to_gcs.py +.. exampleinclude::/../providers/tests/system/google/cloud/gcs/example_s3_to_gcs.py :language: python :start-after: [START howto_transfer_s3togcs_operator_async] :end-before: [END howto_transfer_s3togcs_operator_async] diff --git a/docs/apache-airflow-providers-google/operators/transfer/salesforce_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/salesforce_to_gcs.rst index 62cb4c97c6df2..94f4180f91a64 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/salesforce_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/salesforce_to_gcs.rst @@ -32,7 +32,7 @@ Use the :class:`~airflow.providers.google.cloud.transfers.salesforce_to_gcs.SalesforceToGcsOperator` to execute a Salesforce query to fetch data and load it to GCS. -.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py +.. 
exampleinclude:: /../../providers/src/airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py :language: python :start-after: [START howto_operator_salesforce_to_gcs] :end-before: [END howto_operator_salesforce_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/sftp_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/sftp_to_gcs.rst index 0fd8016b78f4d..cf4d237dd68d3 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/sftp_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/sftp_to_gcs.rst @@ -46,7 +46,7 @@ Copying single files The following Operator copies a single file. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_sftp_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_sftp_to_gcs_copy_single_file] @@ -59,7 +59,7 @@ To move the file use the ``move_object`` parameter. Once the file is copied to G the original file from the SFTP is deleted. The ``destination_path`` parameter defines the full path of the file in the bucket. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_sftp_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_sftp_to_gcs_move_single_file_destination] @@ -71,7 +71,7 @@ Copying directory Use the ``wildcard`` in ``source_path`` parameter to copy the directory. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_sftp_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_sftp_to_gcs_copy_directory] @@ -87,7 +87,7 @@ e.g. ``tests_sftp_hook_dir/subdir/parent-1.bin`` is copied to ``specific_files/p and ``tests_sftp_hook_dir/subdir/parent-2.bin`` is copied to ``specific_files/parent-2.bin`` . ``tests_sftp_hook_dir/subdir/parent-3.txt`` is skipped. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_sftp_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_sftp_to_gcs_move_specific_files] diff --git a/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst index 862028c544e02..0a3389b79b50c 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst @@ -38,7 +38,7 @@ Upload data from Google Sheets to GCS To upload data from Google Spreadsheet to Google Cloud Storage you can use the :class:`~airflow.providers.google.cloud.transfers.sheets_to_gcs.GoogleSheetsToGCSOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_sheets_to_gcs.py +.. 
exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_sheets_to_gcs.py :language: python :dedent: 4 :start-after: [START upload_sheet_to_gcs] diff --git a/docs/apache-airflow-providers-google/operators/transfer/sql_to_sheets.rst b/docs/apache-airflow-providers-google/operators/transfer/sql_to_sheets.rst index 5ce4f19f01a8c..1758c0fc6a417 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/sql_to_sheets.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/sql_to_sheets.rst @@ -35,7 +35,7 @@ Upload data from SQL to Google Sheets To upload data from a database using SQL to Google Spreadsheet you can use the :class:`~airflow.providers.google.suite.transfers.sql_to_sheets.SQLToGoogleSheetsOperator`. -.. exampleinclude:: /../../tests/system/providers/google/cloud/sql_to_sheets/example_sql_to_sheets.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/sql_to_sheets/example_sql_to_sheets.py :language: python :dedent: 4 :start-after: [START upload_sql_to_sheets] diff --git a/docs/apache-airflow-providers-google/operators/transfer/trino_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/trino_to_gcs.rst index a1177eaad7a59..f881dab7f77d9 100644 --- a/docs/apache-airflow-providers-google/operators/transfer/trino_to_gcs.rst +++ b/docs/apache-airflow-providers-google/operators/transfer/trino_to_gcs.rst @@ -49,7 +49,7 @@ All parameters are described in the reference documentation - :class:`~airflow.p An example operator call might look like this: -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_trino_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_trino_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_trino_to_gcs_basic] @@ -67,7 +67,7 @@ You can specify these options by the ``export_format`` parameter. If you want a CSV file to be created, your operator call might look like this: -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_trino_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_trino_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_trino_to_gcs_csv] @@ -81,7 +81,7 @@ will be dumped from the database and upload to the bucket. If you want to create a schema file, then an example operator call might look like this: -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_trino_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_trino_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_trino_to_gcs_multiple_types] @@ -102,7 +102,7 @@ maximum allowed file size for a single object. If you want to create 10 MB files, your code might look like this: -.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_trino_to_gcs.py +.. exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_trino_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_read_data_from_gcs_many_chunks] @@ -123,7 +123,7 @@ For example, if you want to create an external table that allows you to create q read data directly from GCS, then you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryCreateExternalTableOperator`.
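First, a rough sketch of the export that produces those GCS files, assuming the ``TrinoToGCSOperator`` keeps the common SQL-to-GCS parameters (``sql``, ``bucket``, ``filename``, ``schema_filename``, ``approx_max_file_size_bytes``); the query, bucket, and file names are placeholders:

.. code-block:: python

    from airflow.providers.google.cloud.transfers.trino_to_gcs import TrinoToGCSOperator

    trino_export = TrinoToGCSOperator(
        task_id="trino_export",
        sql="SELECT * FROM memory.default.test_table",  # placeholder query
        bucket="my-export-bucket",  # placeholder bucket
        filename="trino_export/{}.json",  # "{}" is replaced with a counter when output is chunked
        schema_filename="trino_export/schema.json",  # optional schema dump for BigQuery
        approx_max_file_size_bytes=10 * 1024 * 1024,  # split output into ~10 MB objects
    )

Using the ``BigQueryCreateExternalTableOperator`` looks like this:

-..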
exampleinclude:: /../../providers/tests/system/google/cloud/gcs/example_trino_to_gcs.py :language: python :dedent: 4 :start-after: [START howto_operator_create_external_table_multiple_types] diff --git a/docs/apache-airflow-providers-google/sensors/google-cloud-tasks.rst b/docs/apache-airflow-providers-google/sensors/google-cloud-tasks.rst index 02dd8602e2024..3e015ba063481 100644 --- a/docs/apache-airflow-providers-google/sensors/google-cloud-tasks.rst +++ b/docs/apache-airflow-providers-google/sensors/google-cloud-tasks.rst @@ -32,7 +32,7 @@ Google Cloud Tasks Empty Sensor To sense Queue being empty use :class:`~airflow.providers.google.cloud.sensor.tasks.TaskQueueEmptySensor` -.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_cloud_task.py +.. exampleinclude:: /../../providers/src/airflow/providers/google/cloud/example_dags/example_cloud_task.py :language: python :dedent: 4 :start-after: [START cloud_tasks_empty_sensor] diff --git a/docs/apache-airflow-providers-grpc/changelog.rst b/docs/apache-airflow-providers-grpc/changelog.rst index c5b39cf9bfc91..7c93191ed4f0d 100644 --- a/docs/apache-airflow-providers-grpc/changelog.rst +++ b/docs/apache-airflow-providers-grpc/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/grpc/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/grpc/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-hashicorp/changelog.rst b/docs/apache-airflow-providers-hashicorp/changelog.rst index 7e670c6443e31..a2ab3e789158c 100644 --- a/docs/apache-airflow-providers-hashicorp/changelog.rst +++ b/docs/apache-airflow-providers-hashicorp/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/hashicorp/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/hashicorp/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-http/changelog.rst b/docs/apache-airflow-providers-http/changelog.rst index 31947ff163d81..631df66a668d6 100644 --- a/docs/apache-airflow-providers-http/changelog.rst +++ b/docs/apache-airflow-providers-http/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/http/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/http/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-http/index.rst b/docs/apache-airflow-providers-http/index.rst index c99999083f0db..2e7bdf6b78d88 100644 --- a/docs/apache-airflow-providers-http/index.rst +++ b/docs/apache-airflow-providers-http/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/http/index> + System Tests <_api/tests/system/http/index> .. 
toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-http/operators.rst b/docs/apache-airflow-providers-http/operators.rst index 3f52ca0a62d58..87473dc48086a 100644 --- a/docs/apache-airflow-providers-http/operators.rst +++ b/docs/apache-airflow-providers-http/operators.rst @@ -32,14 +32,14 @@ to ``true``. Here we are poking until httpbin gives us a response text containing ``httpbin``. -.. exampleinclude:: /../../tests/system/providers/http/example_http.py +.. exampleinclude:: /../../providers/tests/system/http/example_http.py :language: python :start-after: [START howto_operator_http_http_sensor_check] :end-before: [END howto_operator_http_http_sensor_check] This sensor can also be used in deferrable mode. -.. exampleinclude:: /../../tests/system/providers/http/example_http.py +.. exampleinclude:: /../../providers/tests/system/http/example_http.py :language: python :start-after: [START howto_operator_http_http_sensor_check_deferrable] :end-before: [END howto_operator_http_http_sensor_check_deferrable] @@ -76,14 +76,14 @@ the response text back. In the first example we are calling a ``POST`` with json data and succeed when we get the same json data back; otherwise, the task will fail. -.. exampleinclude:: /../../tests/system/providers/http/example_http.py +.. exampleinclude:: /../../providers/tests/system/http/example_http.py :language: python :start-after: [START howto_operator_http_task_post_op] :end-before: [END howto_operator_http_task_post_op] Here we call a ``GET`` request and pass params to it. The task will succeed regardless of the response text. -.. exampleinclude:: /../../tests/system/providers/http/example_http.py +.. exampleinclude:: /../../providers/tests/system/http/example_http.py :language: python :start-after: [START howto_operator_http_task_get_op] :end-before: [END howto_operator_http_task_get_op] @@ -98,7 +98,7 @@ it on the next task downstream use ``response_filter``. This is useful if: Below is an example of retrieving data from a REST API and only returning a nested property instead of the full response body. -.. exampleinclude:: /../../tests/system/providers/http/example_http.py +.. exampleinclude:: /../../providers/tests/system/http/example_http.py :language: python :start-after: [START howto_operator_http_task_get_op_response_filter] :end-before: [END howto_operator_http_task_get_op_response_filter] @@ -106,7 +106,7 @@ In the third example we are performing a ``PUT`` operation to put / set data according to the data that is being provided to the request. -.. exampleinclude:: /../../tests/system/providers/http/example_http.py +.. exampleinclude:: /../../providers/tests/system/http/example_http.py :language: python :start-after: [START howto_operator_http_task_put_op] :end-before: [END howto_operator_http_task_put_op] @@ -114,14 +114,14 @@ provided to the request. In this example we call a ``DELETE`` operation to the ``delete`` endpoint. This time we are passing form data to the request. -.. exampleinclude:: /../../tests/system/providers/http/example_http.py +.. exampleinclude:: /../../providers/tests/system/http/example_http.py :language: python :start-after: [START howto_operator_http_task_del_op] :end-before: [END howto_operator_http_task_del_op] Here we pass form data to a ``POST`` operation which is equivalent to a usual form submit.
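A minimal sketch of such a form-encoded ``POST``, assuming an ``http_default`` connection pointing at httpbin (the endpoint and payload are illustrative):

.. code-block:: python

    from airflow.providers.http.operators.http import HttpOperator

    task_post_op_formenc = HttpOperator(
        task_id="post_op_formenc",
        http_conn_id="http_default",  # assumed connection id
        endpoint="post",
        method="POST",
        data="name=Joe",  # form-encoded style payload, mirroring the example include below
    )

-..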
exampleinclude:: /../../providers/tests/system/http/example_http.py :language: python :start-after: [START howto_operator_http_task_post_op_formenc] :end-before: [END howto_operator_http_task_post_op_formenc] @@ -140,7 +140,7 @@ You can write a ``pagination_function`` that will receive the raw ``request.Resp generate new request parameters (as ``dict``) based on this cursor. The HttpOperator will repeat calls to the API until the function stop returning anything. -.. exampleinclude:: /../../tests/system/providers/http/example_http.py +.. exampleinclude:: /../../providers/tests/system/http/example_http.py :language: python :start-after: [START howto_operator_http_pagination_function] :end-before: [END howto_operator_http_pagination_function] diff --git a/docs/apache-airflow-providers-imap/changelog.rst b/docs/apache-airflow-providers-imap/changelog.rst index f10bae15755c4..1cfb71752efa5 100644 --- a/docs/apache-airflow-providers-imap/changelog.rst +++ b/docs/apache-airflow-providers-imap/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/imap/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/imap/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-influxdb/changelog.rst b/docs/apache-airflow-providers-influxdb/changelog.rst index 32fe8a7a55e38..e1798cda0d09a 100644 --- a/docs/apache-airflow-providers-influxdb/changelog.rst +++ b/docs/apache-airflow-providers-influxdb/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/influxdb/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/influxdb/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-influxdb/index.rst b/docs/apache-airflow-providers-influxdb/index.rst index 8df098b8481cd..3ab429376c05e 100644 --- a/docs/apache-airflow-providers-influxdb/index.rst +++ b/docs/apache-airflow-providers-influxdb/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/influxdb/index> + System Tests <_api/tests/system/influxdb/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-influxdb/operators/index.rst b/docs/apache-airflow-providers-influxdb/operators/index.rst index b3d0350477dca..d05fb6d1d9159 100644 --- a/docs/apache-airflow-providers-influxdb/operators/index.rst +++ b/docs/apache-airflow-providers-influxdb/operators/index.rst @@ -27,7 +27,7 @@ SQL commands in a `InfluxDB `__ database. An example of running the query using the operator: -.. exampleinclude:: /../../tests/system/providers/influxdb/example_influxdb_query.py +.. exampleinclude:: /../../providers/tests/system/influxdb/example_influxdb_query.py :language: python :start-after: [START howto_operator_influxdb] :end-before: [END howto_operator_influxdb] diff --git a/docs/apache-airflow-providers-jdbc/changelog.rst b/docs/apache-airflow-providers-jdbc/changelog.rst index 6697e52b7f0b6..916dba68d1b1f 100644 --- a/docs/apache-airflow-providers-jdbc/changelog.rst +++ b/docs/apache-airflow-providers-jdbc/changelog.rst @@ -22,4 +22,4 @@ .. 
IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/jdbc/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/jdbc/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-jdbc/index.rst b/docs/apache-airflow-providers-jdbc/index.rst index e31369841cd7c..3010c1e5d0802 100644 --- a/docs/apache-airflow-providers-jdbc/index.rst +++ b/docs/apache-airflow-providers-jdbc/index.rst @@ -50,14 +50,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/jdbc/index> + System Tests <_api/tests/system/jdbc/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-jdbc/operators.rst b/docs/apache-airflow-providers-jdbc/operators.rst index ea7297635b915..eba54d065d0c7 100644 --- a/docs/apache-airflow-providers-jdbc/operators.rst +++ b/docs/apache-airflow-providers-jdbc/operators.rst @@ -73,7 +73,7 @@ commands against a database (or data storage) accessible via a JDBC driver. The :doc:`JDBC Connection ` must be passed as ``conn_id``. -.. exampleinclude:: /../../tests/system/providers/jdbc/example_jdbc_queries.py +.. exampleinclude:: /../../providers/tests/system/jdbc/example_jdbc_queries.py :language: python :start-after: [START howto_operator_jdbc] :end-before: [END howto_operator_jdbc] @@ -91,7 +91,7 @@ Templating You can use :ref:`Jinja templates ` to parameterize ``sql``. -.. exampleinclude:: /../../tests/system/providers/jdbc/example_jdbc_queries.py +.. exampleinclude:: /../../providers/tests/system/jdbc/example_jdbc_queries.py :language: python :start-after: [START howto_operator_jdbc_template] :end-before: [END howto_operator_jdbc_template] diff --git a/docs/apache-airflow-providers-jenkins/changelog.rst b/docs/apache-airflow-providers-jenkins/changelog.rst index 4f556ba51445e..40dd4c7db5d9b 100644 --- a/docs/apache-airflow-providers-jenkins/changelog.rst +++ b/docs/apache-airflow-providers-jenkins/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/jenkins/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/jenkins/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-jenkins/index.rst b/docs/apache-airflow-providers-jenkins/index.rst index d890406508df5..1aa841a5d8f84 100644 --- a/docs/apache-airflow-providers-jenkins/index.rst +++ b/docs/apache-airflow-providers-jenkins/index.rst @@ -48,14 +48,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/jenkins/index> + System Tests <_api/tests/system/jenkins/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-microsoft-azure/changelog.rst b/docs/apache-airflow-providers-microsoft-azure/changelog.rst index 4456c720c8840..8b73842cf5a9e 100644 --- a/docs/apache-airflow-providers-microsoft-azure/changelog.rst +++ b/docs/apache-airflow-providers-microsoft-azure/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. 
include:: ../../airflow/providers/microsoft/azure/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/microsoft/azure/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-microsoft-azure/index.rst b/docs/apache-airflow-providers-microsoft-azure/index.rst index 7a073a3b72341..5eb7c53813f3c 100644 --- a/docs/apache-airflow-providers-microsoft-azure/index.rst +++ b/docs/apache-airflow-providers-microsoft-azure/index.rst @@ -54,14 +54,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/microsoft/azure/index> + System Tests <_api/tests/system/microsoft/azure/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-microsoft-azure/operators/adf_run_pipeline.rst b/docs/apache-airflow-providers-microsoft-azure/operators/adf_run_pipeline.rst index 024873d597a55..0feb5802093ad 100644 --- a/docs/apache-airflow-providers-microsoft-azure/operators/adf_run_pipeline.rst +++ b/docs/apache-airflow-providers-microsoft-azure/operators/adf_run_pipeline.rst @@ -31,7 +31,7 @@ This functionality can be disabled for an asynchronous wait -- typically with th Below is an example of using this operator to execute an Azure Data Factory pipeline. - .. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adf_run_pipeline.py + .. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_adf_run_pipeline.py :language: python :dedent: 0 :start-after: [START howto_operator_adf_run_pipeline] @@ -40,7 +40,7 @@ Below is an example of using this operator to execute an Azure Data Factory pipe Below is an example of using this operator to execute an Azure Data Factory pipeline with a deferrable flag so that polling for the status of the pipeline run occurs on the Airflow Triggerer. - .. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adf_run_pipeline.py + .. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_adf_run_pipeline.py :language: python :dedent: 4 :start-after: [START howto_operator_adf_run_pipeline_with_deferrable_flag] @@ -48,7 +48,7 @@ so that polling for the status of the pipeline run occurs on the Airflow Trigger Here is a different example of using this operator to execute a pipeline but coupled with the :class:`~airflow.providers.microsoft.azure.sensors.data_factory.AzureDataFactoryPipelineRunStatusSensor` to perform an asynchronous wait. - .. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adf_run_pipeline.py + .. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_adf_run_pipeline.py :language: python :dedent: 0 :start-after: [START howto_operator_adf_run_pipeline_async] @@ -56,7 +56,7 @@ Here is a different example of using this operator to execute a pipeline but cou Also you can use deferrable mode in :class:`~airflow.providers.microsoft.azure.sensors.data_factory.AzureDataFactoryPipelineRunStatusSensor` if you would like to free up the worker slots while the sensor is running. - .. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adf_run_pipeline.py + .. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_adf_run_pipeline.py :language: python :dedent: 0 :start-after: [START howto_operator_adf_run_pipeline_async] @@ -72,7 +72,7 @@ status of a data factory pipeline run asynchronously. 
This sensor will free up t polling for job status happens on the Airflow triggerer, leading to efficient utilization of resources within Airflow. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adf_run_pipeline.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_adf_run_pipeline.py :language: python :dedent: 4 :start-after: [START howto_operator_adf_run_pipeline_async] diff --git a/docs/apache-airflow-providers-microsoft-azure/operators/adls.rst b/docs/apache-airflow-providers-microsoft-azure/operators/adls.rst index 24b6a3ac5603e..4209bcf3096ac 100644 --- a/docs/apache-airflow-providers-microsoft-azure/operators/adls.rst +++ b/docs/apache-airflow-providers-microsoft-azure/operators/adls.rst @@ -35,7 +35,7 @@ upload data to Azure DataLake Storage Below is an example of using this operator to upload data to ADL. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adls_create.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_adls_create.py :language: python :dedent: 0 :start-after: [START howto_operator_adls_create] @@ -52,7 +52,7 @@ file(s) from Azure DataLake Storage Below is an example of using this operator to delete a file from ADL. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adls_delete.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_adls_delete.py :language: python :dedent: 0 :start-after: [START howto_operator_adls_delete] @@ -69,7 +69,7 @@ file(s) from Azure DataLake Storage Below is an example of using this operator to list files from ADL. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adls_list.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_adls_list.py :language: python :dedent: 0 :start-after: [START howto_operator_adls_list] diff --git a/docs/apache-airflow-providers-microsoft-azure/operators/asb.rst b/docs/apache-airflow-providers-microsoft-azure/operators/asb.rst index 9adbbddc4c92e..c7cdfec9e7598 100644 --- a/docs/apache-airflow-providers-microsoft-azure/operators/asb.rst +++ b/docs/apache-airflow-providers-microsoft-azure/operators/asb.rst @@ -41,7 +41,7 @@ To create Azure service bus queue with specific Parameter you can use Below is an example of using this operator to execute an Azure Service Bus Create Queue. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_service_bus.py :language: python :dedent: 4 :start-after: [START howto_operator_create_service_bus_queue] @@ -58,7 +58,7 @@ To Send message or list of message or batch Message to the Azure Service Bus Que Below is an example of using this operator to execute an Azure Service Bus Send Message to Queue. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_service_bus.py :language: python :dedent: 4 :start-after: [START howto_operator_send_message_to_service_bus_queue] @@ -75,7 +75,7 @@ To Receive Message or list of message or Batch message in a Queue you can use Below is an example of using this operator to execute an Azure Service Bus Create Queue. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py +.. 
exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_service_bus.py :language: python :dedent: 4 :start-after: [START howto_operator_receive_message_service_bus_queue] @@ -92,7 +92,7 @@ To Delete the Azure service bus queue you can use Below is an example of using this operator to execute an Azure Service Bus Delete Queue. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_service_bus.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_service_bus_queue] @@ -113,7 +113,7 @@ To create Azure service bus topic with specific Parameter you can use Below is an example of using this operator to execute an Azure Service Bus Create Topic. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_service_bus.py :language: python :dedent: 4 :start-after: [START howto_operator_create_service_bus_topic] @@ -129,7 +129,7 @@ To Delete the Azure service bus topic you can use Below is an example of using this operator to execute an Azure Service Bus Delete topic. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_service_bus.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_service_bus_topic] @@ -150,7 +150,7 @@ To create Azure service bus topic Subscription with specific Parameter you can u Below is an example of using this operator to execute an Azure Service Bus Create Subscription. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_service_bus.py :language: python :dedent: 4 :start-after: [START howto_operator_create_service_bus_subscription] @@ -166,7 +166,7 @@ To Update the Azure service bus topic Subscription which is already created, wit Below is an example of using this operator to execute an Azure Service Bus Update Subscription. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_service_bus.py :language: python :dedent: 4 :start-after: [START howto_operator_update_service_bus_subscription] @@ -182,7 +182,7 @@ To Receive a Batch messages from a Service Bus Subscription under specific Topic Below is an example of using this operator to execute an Azure Service Bus Receive Subscription Message. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_service_bus.py :language: python :dedent: 4 :start-after: [START howto_operator_receive_message_service_bus_subscription] @@ -198,7 +198,7 @@ To Delete the Azure service bus topic Subscription you can use Below is an example of using this operator to execute an Azure Service Bus Delete Subscription under topic. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py +.. 
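For context on the Service Bus hunks above, a compact sketch of the queue lifecycle they describe (create, send, receive, delete); the queue name is a placeholder and the default ``azure_service_bus_conn_id`` connection is assumed.

.. code-block:: python

    from airflow.providers.microsoft.azure.operators.asb import (
        AzureServiceBusCreateQueueOperator,
        AzureServiceBusDeleteQueueOperator,
        AzureServiceBusReceiveMessageOperator,
        AzureServiceBusSendMessageOperator,
    )

    # Create the queue, push one message, drain it, then clean up.
    create_queue = AzureServiceBusCreateQueueOperator(task_id="create_queue", queue_name="sb-queue")
    send_message = AzureServiceBusSendMessageOperator(
        task_id="send_message",
        queue_name="sb-queue",
        message="hello from Airflow",
        batch=False,
    )
    receive_message = AzureServiceBusReceiveMessageOperator(
        task_id="receive_message",
        queue_name="sb-queue",
        max_message_count=10,
        max_wait_time=5,
    )
    delete_queue = AzureServiceBusDeleteQueueOperator(task_id="delete_queue", queue_name="sb-queue")

    create_queue >> send_message >> receive_message >> delete_queue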
exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_service_bus.py :language: python :dedent: 4 :start-after: [START howto_operator_delete_service_bus_subscription] diff --git a/docs/apache-airflow-providers-microsoft-azure/operators/azure_synapse.rst b/docs/apache-airflow-providers-microsoft-azure/operators/azure_synapse.rst index 440e173d04a78..f3a2a3affd760 100644 --- a/docs/apache-airflow-providers-microsoft-azure/operators/azure_synapse.rst +++ b/docs/apache-airflow-providers-microsoft-azure/operators/azure_synapse.rst @@ -34,7 +34,7 @@ terminate with a "Succeeded" status. Below is an example of using this operator to execute a Spark application on Azure Synapse. - .. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_synapse.py + .. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_synapse.py :language: python :dedent: 4 :start-after: [START howto_operator_azure_synapse] @@ -46,7 +46,7 @@ AzureSynapseRunPipelineOperator Use the: class:`~airflow.providers.microsoft.azure.operators.synapse.AzureSynapseRunPipelineOperator` to execute a pipeline application within Synapse Analytics. The operator will Execute a Synapse Pipeline. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_synapse_run_pipeline.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_synapse_run_pipeline.py :language: python :dedent: 4 :start-after: [START howto_operator_azure_synapse_run_pipeline] diff --git a/docs/apache-airflow-providers-microsoft-azure/operators/batch.rst b/docs/apache-airflow-providers-microsoft-azure/operators/batch.rst index 5fa5a3808d906..32d40ecbe5ba7 100644 --- a/docs/apache-airflow-providers-microsoft-azure/operators/batch.rst +++ b/docs/apache-airflow-providers-microsoft-azure/operators/batch.rst @@ -26,7 +26,7 @@ Use the Below is an example of using this operator to trigger a task on Azure Batch -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_batch_operator.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_batch_operator.py :language: python :dedent: 0 :start-after: [START howto_azure_batch_operator] diff --git a/docs/apache-airflow-providers-microsoft-azure/operators/msgraph.rst b/docs/apache-airflow-providers-microsoft-azure/operators/msgraph.rst index 56a9259f93143..a91a833855f70 100644 --- a/docs/apache-airflow-providers-microsoft-azure/operators/msgraph.rst +++ b/docs/apache-airflow-providers-microsoft-azure/operators/msgraph.rst @@ -34,7 +34,7 @@ Use the Below is an example of using this operator to get a Sharepoint site. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_msgraph.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_msgraph.py :language: python :dedent: 0 :start-after: [START howto_operator_graph_site] @@ -42,7 +42,7 @@ Below is an example of using this operator to get a Sharepoint site. Below is an example of using this operator to get a Sharepoint site pages. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_msgraph.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_msgraph.py :language: python :dedent: 0 :start-after: [START howto_operator_graph_site_pages] @@ -50,7 +50,7 @@ Below is an example of using this operator to get a Sharepoint site pages. Below is an example of using this operator to get PowerBI workspaces. -.. 
exampleinclude:: /../../tests/system/providers/microsoft/azure/example_powerbi.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_powerbi.py :language: python :dedent: 0 :start-after: [START howto_operator_powerbi_workspaces] @@ -58,7 +58,7 @@ Below is an example of using this operator to get PowerBI workspaces. Below is an example of using this operator to get PowerBI workspaces info. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_powerbi.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_powerbi.py :language: python :dedent: 0 :start-after: [START howto_operator_powerbi_workspaces_info] @@ -66,7 +66,7 @@ Below is an example of using this operator to get PowerBI workspaces info. Below is an example of using this operator to refresh PowerBI dataset. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_powerbi.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_powerbi.py :language: python :dedent: 0 :start-after: [START howto_operator_powerbi_refresh_dataset] @@ -74,7 +74,7 @@ Below is an example of using this operator to refresh PowerBI dataset. Below is an example of using this operator to create an item schedule in Fabric. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_msfabric.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_msfabric.py :language: python :dedent: 0 :start-after: [START howto_operator_ms_fabric_create_item_schedule] diff --git a/docs/apache-airflow-providers-microsoft-azure/sensors/cosmos_document_sensor.rst b/docs/apache-airflow-providers-microsoft-azure/sensors/cosmos_document_sensor.rst index c0a7d140cc22c..f15064d4852ff 100644 --- a/docs/apache-airflow-providers-microsoft-azure/sensors/cosmos_document_sensor.rst +++ b/docs/apache-airflow-providers-microsoft-azure/sensors/cosmos_document_sensor.rst @@ -28,7 +28,7 @@ Azure Cosmos Document Sensor Checks for the existence of a document which matches the given query in CosmosDB. :class:`~airflow.providers.microsoft.azure.sensors.cosmos.AzureCosmosDocumentSensor` -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_cosmosdb.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_azure_cosmosdb.py :language: python :dedent: 4 :start-after: [START cosmos_document_sensor] diff --git a/docs/apache-airflow-providers-microsoft-azure/sensors/msgraph.rst b/docs/apache-airflow-providers-microsoft-azure/sensors/msgraph.rst index 4ddad88f19fa1..f3a6e5c8c99e6 100644 --- a/docs/apache-airflow-providers-microsoft-azure/sensors/msgraph.rst +++ b/docs/apache-airflow-providers-microsoft-azure/sensors/msgraph.rst @@ -27,7 +27,7 @@ Use the Below is an example of using this sensor to poll the status of a PowerBI workspace. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_powerbi.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_powerbi.py :language: python :dedent: 0 :start-after: [START howto_sensor_powerbi_scan_status] diff --git a/docs/apache-airflow-providers-microsoft-azure/sensors/wasb_sensors.rst b/docs/apache-airflow-providers-microsoft-azure/sensors/wasb_sensors.rst index c01adeab1a89f..2f79f6df6b20b 100644 --- a/docs/apache-airflow-providers-microsoft-azure/sensors/wasb_sensors.rst +++ b/docs/apache-airflow-providers-microsoft-azure/sensors/wasb_sensors.rst @@ -29,7 +29,7 @@ Wasb Blob Sensor Waits for a blob to arrive on Azure Blob Storage. 
:class:`~airflow.providers.microsoft.azure.sensors.wasb.WasbBlobSensor` -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_wasb_sensors.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_wasb_sensors.py :language: python :dedent: 4 :start-after: [START wasb_blob_sensor] @@ -41,7 +41,7 @@ Wasb Prefix Sensor Waits for blobs matching a prefix to arrive on Azure Blob Storage. :class:`~airflow.providers.microsoft.azure.sensors.wasb.WasbPrefixSensor` -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_wasb_sensors.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_wasb_sensors.py :language: python :dedent: 4 :start-after: [START wasb_prefix_sensor] diff --git a/docs/apache-airflow-providers-microsoft-azure/transfer/local_to_adls.rst b/docs/apache-airflow-providers-microsoft-azure/transfer/local_to_adls.rst index 76a06133d21db..19f4c36d15c27 100644 --- a/docs/apache-airflow-providers-microsoft-azure/transfer/local_to_adls.rst +++ b/docs/apache-airflow-providers-microsoft-azure/transfer/local_to_adls.rst @@ -38,7 +38,7 @@ upload data from local filesystem to ADL. Below is an example of using this operator to upload a file to ADL. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_local_to_adls.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_local_to_adls.py :language: python :dedent: 0 :start-after: [START howto_operator_local_to_adls] diff --git a/docs/apache-airflow-providers-microsoft-azure/transfer/local_to_wasb.rst b/docs/apache-airflow-providers-microsoft-azure/transfer/local_to_wasb.rst index 970a6298342ed..fed25a5797ae6 100644 --- a/docs/apache-airflow-providers-microsoft-azure/transfer/local_to_wasb.rst +++ b/docs/apache-airflow-providers-microsoft-azure/transfer/local_to_wasb.rst @@ -36,7 +36,7 @@ upload data from local filesystem to Azure Blob Storage. Below is an example of using this operator to upload a file to Azure Blob Storage. -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_local_to_wasb.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_local_to_wasb.py :language: python :dedent: 0 :start-after: [START howto_operator_local_to_wasb] diff --git a/docs/apache-airflow-providers-microsoft-azure/transfer/s3_to_wasb.rst b/docs/apache-airflow-providers-microsoft-azure/transfer/s3_to_wasb.rst index 59a394b0935fb..e206611389530 100644 --- a/docs/apache-airflow-providers-microsoft-azure/transfer/s3_to_wasb.rst +++ b/docs/apache-airflow-providers-microsoft-azure/transfer/s3_to_wasb.rst @@ -47,7 +47,7 @@ To copy data from an Amazon AWS S3 Bucket to an Azure Blob Storage container, th Example usage: -.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_s3_to_wasb.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_s3_to_wasb.py :language: python :dedent: 4 :start-after: [START howto_transfer_s3_to_wasb] diff --git a/docs/apache-airflow-providers-microsoft-azure/transfer/sftp_to_wasb.rst b/docs/apache-airflow-providers-microsoft-azure/transfer/sftp_to_wasb.rst index a79500de6a3bc..a4699655a3d3d 100644 --- a/docs/apache-airflow-providers-microsoft-azure/transfer/sftp_to_wasb.rst +++ b/docs/apache-airflow-providers-microsoft-azure/transfer/sftp_to_wasb.rst @@ -54,7 +54,7 @@ To get information about jobs within a Azure Blob Storage use: :class:`~airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SFTPToWasbOperator` Example usage: -.. 
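To make the two WASB sensors above concrete, a minimal sketch with placeholder container, blob, and prefix values; the default ``wasb_default`` connection is assumed.

.. code-block:: python

    from airflow.providers.microsoft.azure.sensors.wasb import WasbBlobSensor, WasbPrefixSensor

    # Wait for one specific blob to land in the container.
    wait_for_blob = WasbBlobSensor(
        task_id="wait_for_blob",
        container_name="my-container",  # placeholder
        blob_name="data/report.csv",  # placeholder
    )

    # Wait for any blob whose name starts with the given prefix.
    wait_for_prefix = WasbPrefixSensor(
        task_id="wait_for_prefix",
        container_name="my-container",
        prefix="data/",
    )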
exampleinclude:: /../../tests/system/providers/microsoft/azure/example_sftp_to_wasb.py +.. exampleinclude:: /../../providers/tests/system/microsoft/azure/example_sftp_to_wasb.py :language: python :dedent: 4 :start-after: [START how_to_sftp_to_wasb] diff --git a/docs/apache-airflow-providers-microsoft-mssql/changelog.rst b/docs/apache-airflow-providers-microsoft-mssql/changelog.rst index d7b97646ce1a3..5c5b6c990874b 100644 --- a/docs/apache-airflow-providers-microsoft-mssql/changelog.rst +++ b/docs/apache-airflow-providers-microsoft-mssql/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/microsoft/mssql/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/microsoft/mssql/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-microsoft-mssql/index.rst b/docs/apache-airflow-providers-microsoft-mssql/index.rst index a1032303afe23..f14c3caf7f2d1 100644 --- a/docs/apache-airflow-providers-microsoft-mssql/index.rst +++ b/docs/apache-airflow-providers-microsoft-mssql/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/microsoft/mssql/index> + System Tests <_api/tests/system/microsoft/mssql/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-microsoft-mssql/operators.rst b/docs/apache-airflow-providers-microsoft-mssql/operators.rst index 9d2189c7719bf..560e196731b57 100644 --- a/docs/apache-airflow-providers-microsoft-mssql/operators.rst +++ b/docs/apache-airflow-providers-microsoft-mssql/operators.rst @@ -41,7 +41,7 @@ The code snippets below are based on Airflow-2.2 An example usage of the SQLExecuteQueryOperator to connect to MSSQL is as follows: -.. exampleinclude:: /../../tests/system/providers/microsoft/mssql/example_mssql.py +.. exampleinclude:: /../../providers/tests/system/microsoft/mssql/example_mssql.py :language: python :start-after: [START howto_operator_mssql] :end-before: [END howto_operator_mssql] @@ -49,7 +49,7 @@ An example usage of the SQLExecuteQueryOperator to connect to MSSQL is as follow You can also use an external file to execute the SQL commands. Script folder must be at the same level as DAG.py file. This way you can easily maintain the SQL queries separated from the code. -.. exampleinclude:: /../../tests/system/providers/microsoft/mssql/example_mssql.py +.. exampleinclude:: /../../providers/tests/system/microsoft/mssql/example_mssql.py :language: python :start-after: [START mssql_operator_howto_guide_create_table_mssql_from_external_file] :end-before: [END mssql_operator_howto_guide_create_table_mssql_from_external_file] @@ -71,7 +71,7 @@ Inserting data into a MSSQL database table --------------------------------------------- We can then create a SQLExecuteQueryOperator task that populate the ``Users`` table. -.. exampleinclude:: /../../tests/system/providers/microsoft/mssql/example_mssql.py +.. exampleinclude:: /../../providers/tests/system/microsoft/mssql/example_mssql.py :language: python :start-after: [START mssql_operator_howto_guide_populate_user_table] :end-before: [END mssql_operator_howto_guide_populate_user_table] @@ -82,7 +82,7 @@ Fetching records from your MSSQL database table Fetching records from your MSSQL database table can be as simple as: -.. 
exampleinclude:: /../../tests/system/providers/microsoft/mssql/example_mssql.py +.. exampleinclude:: /../../providers/tests/system/microsoft/mssql/example_mssql.py :language: python :start-after: [START mssql_operator_howto_guide_get_all_countries] :end-before: [END mssql_operator_howto_guide_get_all_countries] @@ -96,7 +96,7 @@ SQL requests during runtime. To find the countries in Asian continent: -.. exampleinclude:: /../../tests/system/providers/microsoft/mssql/example_mssql.py +.. exampleinclude:: /../../providers/tests/system/microsoft/mssql/example_mssql.py :language: python :start-after: [START mssql_operator_howto_guide_params_passing_get_query] :end-before: [END mssql_operator_howto_guide_params_passing_get_query] @@ -107,7 +107,7 @@ The complete SQLExecuteQueryOperator DAG to connect to MSSQL When we put everything together, our DAG should look like this: -.. exampleinclude:: /../../tests/system/providers/microsoft/mssql/example_mssql.py +.. exampleinclude:: /../../providers/tests/system/microsoft/mssql/example_mssql.py :language: python :start-after: [START mssql_operator_howto_guide] :end-before: [END mssql_operator_howto_guide] diff --git a/docs/apache-airflow-providers-microsoft-psrp/changelog.rst b/docs/apache-airflow-providers-microsoft-psrp/changelog.rst index bba78b70734f4..ed4648eded19f 100644 --- a/docs/apache-airflow-providers-microsoft-psrp/changelog.rst +++ b/docs/apache-airflow-providers-microsoft-psrp/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/microsoft/psrp/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/microsoft/psrp/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-microsoft-winrm/changelog.rst b/docs/apache-airflow-providers-microsoft-winrm/changelog.rst index 05d79981df50d..fb0faf44d10fb 100644 --- a/docs/apache-airflow-providers-microsoft-winrm/changelog.rst +++ b/docs/apache-airflow-providers-microsoft-winrm/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/microsoft/winrm/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/microsoft/winrm/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-microsoft-winrm/index.rst b/docs/apache-airflow-providers-microsoft-winrm/index.rst index b3b7cc0afcfb2..6bc6954fe11e6 100644 --- a/docs/apache-airflow-providers-microsoft-winrm/index.rst +++ b/docs/apache-airflow-providers-microsoft-winrm/index.rst @@ -48,14 +48,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/microsoft/winrm/index> + System Tests <_api/tests/system/microsoft/winrm/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-microsoft-winrm/operators.rst b/docs/apache-airflow-providers-microsoft-winrm/operators.rst index e70d124f6262c..2e7fdc6633486 100644 --- a/docs/apache-airflow-providers-microsoft-winrm/operators.rst +++ b/docs/apache-airflow-providers-microsoft-winrm/operators.rst @@ -22,7 +22,7 @@ use the WinRMOperator to execute commands on a given remote host using the winrm create a hook -.. exampleinclude:: /../../tests/system/providers/microsoft/winrm/example_winrm.py +.. 
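As a quick illustration of the MSSQL walkthrough above, a minimal sketch using ``SQLExecuteQueryOperator``; the ``airflow_mssql`` connection id and the ``Users`` table are illustrative assumptions.

.. code-block:: python

    from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator

    # Create a table and fetch rows back through the same connection.
    create_table = SQLExecuteQueryOperator(
        task_id="create_table",
        conn_id="airflow_mssql",  # assumed connection id
        sql="CREATE TABLE Users (username VARCHAR(100), email VARCHAR(100));",
    )

    get_all_users = SQLExecuteQueryOperator(
        task_id="get_all_users",
        conn_id="airflow_mssql",
        sql="SELECT * FROM Users;",
    )

    create_table >> get_all_users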
exampleinclude:: /../../providers/tests/system/microsoft/winrm/example_winrm.py :language: python :dedent: 4 :start-after: [START create_hook] @@ -30,7 +30,7 @@ create a hook Run the operator, pass the hook, and pass a command to do something -.. exampleinclude:: /../../tests/system/providers/microsoft/winrm/example_winrm.py +.. exampleinclude:: /../../providers/tests/system/microsoft/winrm/example_winrm.py :language: python :dedent: 4 :start-after: [START run_operator] diff --git a/docs/apache-airflow-providers-mongo/changelog.rst b/docs/apache-airflow-providers-mongo/changelog.rst index dc2b2dead5d62..a1714d0e2b49e 100644 --- a/docs/apache-airflow-providers-mongo/changelog.rst +++ b/docs/apache-airflow-providers-mongo/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/mongo/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/mongo/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-mysql/changelog.rst b/docs/apache-airflow-providers-mysql/changelog.rst index 40b5da4fbad92..807ca2ce0730c 100644 --- a/docs/apache-airflow-providers-mysql/changelog.rst +++ b/docs/apache-airflow-providers-mysql/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/mysql/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/mysql/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-mysql/index.rst b/docs/apache-airflow-providers-mysql/index.rst index 622084d88957c..d8d6129134129 100644 --- a/docs/apache-airflow-providers-mysql/index.rst +++ b/docs/apache-airflow-providers-mysql/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/mysql/index> + System Tests <_api/tests/system/mysql/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-mysql/operators.rst b/docs/apache-airflow-providers-mysql/operators.rst index 2ee7b885cd7bc..55b3727822973 100644 --- a/docs/apache-airflow-providers-mysql/operators.rst +++ b/docs/apache-airflow-providers-mysql/operators.rst @@ -53,14 +53,14 @@ the connection metadata is structured as follows: An example usage of the SQLExecuteQueryOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/mysql/example_mysql.py +.. exampleinclude:: /../../providers/tests/system/mysql/example_mysql.py :language: python :start-after: [START howto_operator_mysql] :end-before: [END howto_operator_mysql] You can also use an external file to execute the SQL commands. Script folder must be at the same level as DAG.py file. -.. exampleinclude:: /../../tests/system/providers/mysql/example_mysql.py +.. exampleinclude:: /../../providers/tests/system/mysql/example_mysql.py :language: python :start-after: [START howto_operator_mysql_external_file] :end-before: [END howto_operator_mysql_external_file] diff --git a/docs/apache-airflow-providers-neo4j/changelog.rst b/docs/apache-airflow-providers-neo4j/changelog.rst index 124d526faa220..8b3d87f653f39 100644 --- a/docs/apache-airflow-providers-neo4j/changelog.rst +++ b/docs/apache-airflow-providers-neo4j/changelog.rst @@ -22,4 +22,4 @@ .. 
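The WinRM hunks above follow a hook-then-operator pattern; here is a minimal sketch, assuming an SSH-style connection named ``ssh_default``.

.. code-block:: python

    from airflow.providers.microsoft.winrm.hooks.winrm import WinRMHook
    from airflow.providers.microsoft.winrm.operators.winrm import WinRMOperator

    # Build the hook once so several tasks can reuse the same connection settings.
    winrm_hook = WinRMHook(ssh_conn_id="ssh_default")  # assumed connection id

    run_remote_command = WinRMOperator(
        task_id="run_remote_command",
        winrm_hook=winrm_hook,
        command="ipconfig /all",  # any command available on the remote host
    )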
IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/neo4j/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/neo4j/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-neo4j/index.rst b/docs/apache-airflow-providers-neo4j/index.rst index 83b78bcd892c9..1b78a2bdba8bd 100644 --- a/docs/apache-airflow-providers-neo4j/index.rst +++ b/docs/apache-airflow-providers-neo4j/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/neo4j/index> + System Tests <_api/tests/system/neo4j/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-neo4j/operators/neo4j.rst b/docs/apache-airflow-providers-neo4j/operators/neo4j.rst index 5bc2362e91089..14f3e3da2c93e 100644 --- a/docs/apache-airflow-providers-neo4j/operators/neo4j.rst +++ b/docs/apache-airflow-providers-neo4j/operators/neo4j.rst @@ -49,7 +49,7 @@ the connection metadata is structured as follows: * - Port: int - Neo4j port -.. exampleinclude:: /../../tests/system/providers/neo4j/example_neo4j.py +.. exampleinclude:: /../../providers/tests/system/neo4j/example_neo4j.py :language: python :dedent: 4 :start-after: [START run_query_neo4j_operator] diff --git a/docs/apache-airflow-providers-odbc/changelog.rst b/docs/apache-airflow-providers-odbc/changelog.rst index 1c24fdd8fe6b0..6028c91353b1d 100644 --- a/docs/apache-airflow-providers-odbc/changelog.rst +++ b/docs/apache-airflow-providers-odbc/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/odbc/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/odbc/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-openai/changelog.rst b/docs/apache-airflow-providers-openai/changelog.rst index 64206f909af45..50402f1c9929b 100644 --- a/docs/apache-airflow-providers-openai/changelog.rst +++ b/docs/apache-airflow-providers-openai/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/openai/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/openai/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-openai/index.rst b/docs/apache-airflow-providers-openai/index.rst index ac0130e9d93ed..8e07d82ca6c16 100644 --- a/docs/apache-airflow-providers-openai/index.rst +++ b/docs/apache-airflow-providers-openai/index.rst @@ -52,7 +52,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/openai/index> + System Tests <_api/tests/system/openai/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-openai/operators/openai.rst b/docs/apache-airflow-providers-openai/operators/openai.rst index fef8521188df6..d82a8058f298d 100644 --- a/docs/apache-airflow-providers-openai/operators/openai.rst +++ b/docs/apache-airflow-providers-openai/operators/openai.rst @@ -32,7 +32,7 @@ connect to your account. An example using the operator is in way: -.. 
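Given the Neo4j connection-metadata table above, a minimal query task might look like this; the connection id and the Cypher query are placeholders for illustration.

.. code-block:: python

    from airflow.providers.neo4j.operators.neo4j import Neo4jOperator

    # Run a Cypher query against the configured Neo4j instance.
    run_neo4j_query = Neo4jOperator(
        task_id="run_neo4j_query",
        neo4j_conn_id="neo4j_default",  # assumed connection id
        sql='MATCH (tom {name: "Tom Hanks"}) RETURN tom',
    )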
exampleinclude:: /../../tests/system/providers/openai/example_openai.py +.. exampleinclude:: /../../providers/tests/system/openai/example_openai.py :language: python :start-after: [START howto_operator_openai_embedding] :end-before: [END howto_operator_openai_embedding] @@ -57,7 +57,7 @@ The OpenAITriggerBatchOperator An example using the operator is in way: -.. exampleinclude:: /../../tests/system/providers/openai/example_trigger_batch_operator.py +.. exampleinclude:: /../../providers/tests/system/openai/example_trigger_batch_operator.py :language: python :start-after: [START howto_operator_openai_trigger_operator] :end-before: [END howto_operator_openai_trigger_operator] diff --git a/docs/apache-airflow-providers-openfaas/changelog.rst b/docs/apache-airflow-providers-openfaas/changelog.rst index 48653876678d8..507f650340ba2 100644 --- a/docs/apache-airflow-providers-openfaas/changelog.rst +++ b/docs/apache-airflow-providers-openfaas/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/openfaas/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/openfaas/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-openlineage/changelog.rst b/docs/apache-airflow-providers-openlineage/changelog.rst index 9c9060dd21a8e..f410ab7a2eaf7 100644 --- a/docs/apache-airflow-providers-openlineage/changelog.rst +++ b/docs/apache-airflow-providers-openlineage/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/openlineage/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/openlineage/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-openlineage/guides/developer.rst b/docs/apache-airflow-providers-openlineage/guides/developer.rst index ccab215fc1846..d826ccde9a5ec 100644 --- a/docs/apache-airflow-providers-openlineage/guides/developer.rst +++ b/docs/apache-airflow-providers-openlineage/guides/developer.rst @@ -290,7 +290,7 @@ To learn more about how Operators and Extractors work together under the hood, c When testing an Extractor, we want to firstly verify if ``OperatorLineage`` object is being created, specifically verifying that the object is being built with the correct input and output datasets and relevant facets. This is done in OpenLineage via pytest, with appropriate mocking and patching for connections and objects. -Check out `example tests `_. +Check out `example tests `_. Testing each facet is also important, as data or graphs in the UI can render incorrectly if the facets are wrong. For example, if the facet name is created incorrectly in the Extractor, then the Operator's task will not show up in the lineage graph, diff --git a/docs/apache-airflow-providers-opensearch/changelog.rst b/docs/apache-airflow-providers-opensearch/changelog.rst index 2ce00fd5fdcff..21f39a6c1f6ea 100644 --- a/docs/apache-airflow-providers-opensearch/changelog.rst +++ b/docs/apache-airflow-providers-opensearch/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/opensearch/CHANGELOG.rst +.. 
include:: ../../providers/src/airflow/providers/opensearch/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-opensearch/index.rst b/docs/apache-airflow-providers-opensearch/index.rst index de79657141fad..efa238c993ef1 100644 --- a/docs/apache-airflow-providers-opensearch/index.rst +++ b/docs/apache-airflow-providers-opensearch/index.rst @@ -51,7 +51,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/opensearch/index> + System Tests <_api/tests/system/opensearch/index> .. toctree:: :hidden: diff --git a/docs/apache-airflow-providers-opensearch/operators/opensearch.rst b/docs/apache-airflow-providers-opensearch/operators/opensearch.rst index cd39a59566fbb..b85a014ebefee 100644 --- a/docs/apache-airflow-providers-opensearch/operators/opensearch.rst +++ b/docs/apache-airflow-providers-opensearch/operators/opensearch.rst @@ -35,7 +35,7 @@ to create a new index in an OpenSearch domain. -.. exampleinclude:: /../../tests/system/providers/opensearch/example_opensearch.py +.. exampleinclude:: /../../providers/tests/system/opensearch/example_opensearch.py :language: python :start-after: [START howto_operator_opensearch_create_index] :dedent: 4 @@ -50,7 +50,7 @@ Add a Document to an Index on OpenSearch Use :class:`~airflow.providers.opensearch.operators.opensearch.OpenSearchAddDocumentOperator` to add single documents to an OpenSearch Index -.. exampleinclude:: /../../tests/system/providers/opensearch/example_opensearch.py +.. exampleinclude:: /../../providers/tests/system/opensearch/example_opensearch.py :language: python :start-after: [START howto_operator_opensearch_add_document] :dedent: 4 @@ -65,7 +65,7 @@ Run a query against an OpenSearch Index Use :class:`~airflow.providers.opensearch.operators.opensearch.OpenSearchQueryOperator` to run a query against an OpenSearch index. -.. exampleinclude:: /../../tests/system/providers/opensearch/example_opensearch.py +.. exampleinclude:: /../../providers/tests/system/opensearch/example_opensearch.py :language: python :start-after: [START howto_operator_opensearch_query] :dedent: 4 diff --git a/docs/apache-airflow-providers-opsgenie/changelog.rst b/docs/apache-airflow-providers-opsgenie/changelog.rst index 4c9f227f1dd62..abb3ab86138c5 100644 --- a/docs/apache-airflow-providers-opsgenie/changelog.rst +++ b/docs/apache-airflow-providers-opsgenie/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/opsgenie/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/opsgenie/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-opsgenie/index.rst b/docs/apache-airflow-providers-opsgenie/index.rst index b09e294bbc85e..30b35890e97c2 100644 --- a/docs/apache-airflow-providers-opsgenie/index.rst +++ b/docs/apache-airflow-providers-opsgenie/index.rst @@ -50,14 +50,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/opsgenie/index> + System Tests <_api/tests/system/opsgenie/index> .. 
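Tying together the three OpenSearch operators referenced above, a rough sketch follows; the index name, document, and query bodies are illustrative, and the default OpenSearch connection is assumed.

.. code-block:: python

    from airflow.providers.opensearch.operators.opensearch import (
        OpenSearchAddDocumentOperator,
        OpenSearchCreateIndexOperator,
        OpenSearchQueryOperator,
    )

    # Create an index, add one document, then search it.
    create_index = OpenSearchCreateIndexOperator(
        task_id="create_index",
        index_name="example-index",  # placeholder
        index_body={"settings": {"index": {"number_of_shards": 1}}},
    )

    add_document = OpenSearchAddDocumentOperator(
        task_id="add_document",
        index_name="example-index",
        doc_id=1,
        document={"title": "Monty Python and the Holy Grail", "year": 1975},
    )

    run_query = OpenSearchQueryOperator(
        task_id="run_query",
        index_name="example-index",
        query={"query": {"match": {"title": "monty"}}},
    )

    create_index >> add_document >> run_query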
toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-opsgenie/notifications/opsgenie_notifier.rst b/docs/apache-airflow-providers-opsgenie/notifications/opsgenie_notifier.rst index 54cff970d4a68..2172be24faa41 100644 --- a/docs/apache-airflow-providers-opsgenie/notifications/opsgenie_notifier.rst +++ b/docs/apache-airflow-providers-opsgenie/notifications/opsgenie_notifier.rst @@ -27,7 +27,7 @@ Using the Notifier ^^^^^^^^^^^^^^^^^^ Send an alert to Opsgenie with a specific message. -.. exampleinclude:: /../../tests/system/providers/opsgenie/example_opsgenie_notifier.py +.. exampleinclude:: /../../providers/tests/system/opsgenie/example_opsgenie_notifier.py :language: python :start-after: [START howto_notifier_opsgenie] :end-before: [END howto_notifier_opsgenie] diff --git a/docs/apache-airflow-providers-opsgenie/operators/opsgenie_alert.rst b/docs/apache-airflow-providers-opsgenie/operators/opsgenie_alert.rst index 5ba7cb6a8952d..ebed7548c4f03 100644 --- a/docs/apache-airflow-providers-opsgenie/operators/opsgenie_alert.rst +++ b/docs/apache-airflow-providers-opsgenie/operators/opsgenie_alert.rst @@ -27,7 +27,7 @@ Using the Operator ^^^^^^^^^^^^^^^^^^ Send an alert to Opsgenie with a specific message. -.. exampleinclude:: /../../tests/system/providers/opsgenie/example_opsgenie_alert.py +.. exampleinclude:: /../../providers/tests/system/opsgenie/example_opsgenie_alert.py :language: python :start-after: [START howto_opsgenie_create_alert_operator] :end-before: [END howto_opsgenie_create_alert_operator] @@ -44,7 +44,7 @@ Using the Operator ^^^^^^^^^^^^^^^^^^ Close alert in Opsgenie. -.. exampleinclude:: /../../tests/system/providers/opsgenie/example_opsgenie_alert.py +.. exampleinclude:: /../../providers/tests/system/opsgenie/example_opsgenie_alert.py :language: python :start-after: [START howto_opsgenie_close_alert_operator] :end-before: [END howto_opsgenie_close_alert_operator] @@ -61,7 +61,7 @@ Using the Operator ^^^^^^^^^^^^^^^^^^ Delete alert in Opsgenie. -.. exampleinclude:: /../../tests/system/providers/opsgenie/example_opsgenie_alert.py +.. exampleinclude:: /../../providers/tests/system/opsgenie/example_opsgenie_alert.py :language: python :dedent: 4 :start-after: [START howto_opsgenie_delete_alert_operator] diff --git a/docs/apache-airflow-providers-oracle/changelog.rst b/docs/apache-airflow-providers-oracle/changelog.rst index 4711449e6c6ce..b6cdc5037ba7c 100644 --- a/docs/apache-airflow-providers-oracle/changelog.rst +++ b/docs/apache-airflow-providers-oracle/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/oracle/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/oracle/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-oracle/operators/index.rst b/docs/apache-airflow-providers-oracle/operators/index.rst index af440c7d19b3c..990cb2e6503b2 100644 --- a/docs/apache-airflow-providers-oracle/operators/index.rst +++ b/docs/apache-airflow-providers-oracle/operators/index.rst @@ -30,7 +30,7 @@ To execute arbitrary SQL in an Oracle database, use the An example of executing a simple query is as follows: -.. exampleinclude:: /../../airflow/providers/oracle/example_dags/example_oracle.py +.. 
exampleinclude:: /../../providers/src/airflow/providers/oracle/example_dags/example_oracle.py :language: python :start-after: [START howto_oracle_operator] :end-before: [END howto_oracle_operator] @@ -58,7 +58,7 @@ a single integer argument, val_out. This can be represented with the following call using :class:`~airflow.providers.oracle.operators.oracle.OracleStoredProcedureOperator` with parameters passed positionally as a list: -.. exampleinclude:: /../../airflow/providers/oracle/example_dags/example_oracle.py +.. exampleinclude:: /../../providers/src/airflow/providers/oracle/example_dags/example_oracle.py :language: python :start-after: [START howto_oracle_stored_procedure_operator_with_list_inout] :end-before: [END howto_oracle_stored_procedure_operator_with_list_inout] @@ -67,7 +67,7 @@ with parameters passed positionally as a list: Alternatively, parameters can be passed as keyword arguments using a dictionary as well. -.. exampleinclude:: /../../airflow/providers/oracle/example_dags/example_oracle.py +.. exampleinclude:: /../../providers/src/airflow/providers/oracle/example_dags/example_oracle.py :language: python :start-after: [START howto_oracle_stored_procedure_operator_with_dict_inout] :end-before: [END howto_oracle_stored_procedure_operator_with_dict_inout] diff --git a/docs/apache-airflow-providers-pagerduty/changelog.rst b/docs/apache-airflow-providers-pagerduty/changelog.rst index ddd707b84f9fa..049844772748b 100644 --- a/docs/apache-airflow-providers-pagerduty/changelog.rst +++ b/docs/apache-airflow-providers-pagerduty/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/pagerduty/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/pagerduty/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-papermill/changelog.rst b/docs/apache-airflow-providers-papermill/changelog.rst index c022939a40099..3c11af574b2fe 100644 --- a/docs/apache-airflow-providers-papermill/changelog.rst +++ b/docs/apache-airflow-providers-papermill/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/papermill/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/papermill/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-papermill/index.rst b/docs/apache-airflow-providers-papermill/index.rst index b32d63bc9c89d..5217f7119e332 100644 --- a/docs/apache-airflow-providers-papermill/index.rst +++ b/docs/apache-airflow-providers-papermill/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/papermill/index> + System Tests <_api/tests/system/papermill/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-papermill/operators.rst b/docs/apache-airflow-providers-papermill/operators.rst index ed1cf580c85a4..fd17fba350602 100644 --- a/docs/apache-airflow-providers-papermill/operators.rst +++ b/docs/apache-airflow-providers-papermill/operators.rst @@ -50,7 +50,7 @@ Example DAG Use the :class:`~airflow.providers.papermill.operators.papermill.PapermillOperator` to execute a jupyter notebook: -.. 
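The stored-procedure call described above can be sketched as follows; ``TEST_PROCEDURE`` (taking ``val_in`` and returning ``val_out``) and the ``oracle`` connection id are assumptions for illustration.

.. code-block:: python

    from airflow.providers.oracle.operators.oracle import OracleStoredProcedureOperator

    # Positional parameters: a value for val_in, and a type for the val_out placeholder.
    call_procedure = OracleStoredProcedureOperator(
        task_id="call_procedure",
        oracle_conn_id="oracle",  # assumed connection id
        procedure="TEST_PROCEDURE",
        parameters=[3, int],
    )

Passing ``parameters={"val_in": 3, "val_out": int}`` instead selects the keyword form mentioned in the hunk above.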
exampleinclude:: /../../tests/system/providers/papermill/example_papermill.py +.. exampleinclude:: /../../providers/tests/system/papermill/example_papermill.py :language: python :dedent: 4 :start-after: [START howto_operator_papermill] @@ -58,7 +58,7 @@ to execute a jupyter notebook: Example DAG to Verify the message in the notebook: -.. exampleinclude:: /../../tests/system/providers/papermill/example_papermill_verify.py +.. exampleinclude:: /../../providers/tests/system/papermill/example_papermill_verify.py :language: python :start-after: [START howto_verify_operator_papermill] :end-before: [END howto_verify_operator_papermill] @@ -66,7 +66,7 @@ Example DAG to Verify the message in the notebook: Example DAG to Verify the message in the notebook using a remote jupyter kernel: -.. exampleinclude:: /../../tests/system/providers/papermill/example_papermill_remote_verify.py +.. exampleinclude:: /../../providers/tests/system/papermill/example_papermill_remote_verify.py :language: python :start-after: [START howto_verify_operator_papermill_remote_kernel] :end-before: [END howto_verify_operator_papermill_remote_kernel] diff --git a/docs/apache-airflow-providers-pgvector/changelog.rst b/docs/apache-airflow-providers-pgvector/changelog.rst index 9e33a9e9e6065..15c377d7cfc85 100644 --- a/docs/apache-airflow-providers-pgvector/changelog.rst +++ b/docs/apache-airflow-providers-pgvector/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/pgvector/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/pgvector/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-pgvector/index.rst b/docs/apache-airflow-providers-pgvector/index.rst index a7b280a9981e5..18d69634b876e 100644 --- a/docs/apache-airflow-providers-pgvector/index.rst +++ b/docs/apache-airflow-providers-pgvector/index.rst @@ -60,7 +60,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/pgvector/index> + System Tests <_api/tests/system/pgvector/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-pgvector/operators/pgvector.rst b/docs/apache-airflow-providers-pgvector/operators/pgvector.rst index dfee1c9fef2a5..b8ef6b27fccee 100644 --- a/docs/apache-airflow-providers-pgvector/operators/pgvector.rst +++ b/docs/apache-airflow-providers-pgvector/operators/pgvector.rst @@ -36,7 +36,7 @@ See https://github.com/pgvector/pgvector#installation for installation instructi An example using the operator to ingest data is shown below: -.. exampleinclude:: /../../tests/system/providers/pgvector/example_pgvector.py +.. exampleinclude:: /../../providers/tests/system/pgvector/example_pgvector.py :language: python :start-after: [START howto_operator_pgvector_ingest] :end-before: [END howto_operator_pgvector_ingest] diff --git a/docs/apache-airflow-providers-pinecone/changelog.rst b/docs/apache-airflow-providers-pinecone/changelog.rst index 73d9f14c9a674..4d21af53ed850 100644 --- a/docs/apache-airflow-providers-pinecone/changelog.rst +++ b/docs/apache-airflow-providers-pinecone/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/pinecone/CHANGELOG.rst +.. 
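A minimal stand-alone version of the notebook-execution pattern shown in the Papermill hunks above; the notebook paths and the injected ``msgs`` parameter are placeholders.

.. code-block:: python

    from airflow.providers.papermill.operators.papermill import PapermillOperator

    # Execute the notebook, injecting parameters and writing the executed copy.
    run_notebook = PapermillOperator(
        task_id="run_example_notebook",
        input_nb="/tmp/hello_world.ipynb",  # placeholder input notebook
        output_nb="/tmp/out-{{ execution_date }}.ipynb",
        parameters={"msgs": "Ran from Airflow at {{ execution_date }}!"},
    )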
include:: ../../providers/src/airflow/providers/pinecone/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-pinecone/index.rst b/docs/apache-airflow-providers-pinecone/index.rst index ea00879975c9c..8c329abde18e6 100644 --- a/docs/apache-airflow-providers-pinecone/index.rst +++ b/docs/apache-airflow-providers-pinecone/index.rst @@ -52,7 +52,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/pinecone/index> + System Tests <_api/tests/system/pinecone/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-pinecone/operators/pinecone.rst b/docs/apache-airflow-providers-pinecone/operators/pinecone.rst index b50e5300f09a9..0f8e49442fe5d 100644 --- a/docs/apache-airflow-providers-pinecone/operators/pinecone.rst +++ b/docs/apache-airflow-providers-pinecone/operators/pinecone.rst @@ -36,7 +36,7 @@ the original text corresponding to the vectors that could be ingested into the d An example using the operator in this way: -.. exampleinclude:: /../../tests/system/providers/pinecone/example_dag_pinecone.py +.. exampleinclude:: /../../providers/tests/system/pinecone/example_dag_pinecone.py :language: python :dedent: 4 :start-after: [START howto_operator_pinecone_ingest] @@ -58,7 +58,7 @@ passed via arguments to the operator or via the connection. An example using the operator in this way: -.. exampleinclude:: /../../tests/system/providers/pinecone/example_create_pod_index.py +.. exampleinclude:: /../../providers/tests/system/pinecone/example_create_pod_index.py :language: python :dedent: 4 :start-after: [START howto_operator_create_pod_index] @@ -81,7 +81,7 @@ passed via arguments to the operator or via the connection. An example using the operator in this way: -.. exampleinclude:: /../../tests/system/providers/pinecone/example_create_serverless_index.py +.. exampleinclude:: /../../providers/tests/system/pinecone/example_create_serverless_index.py :language: python :dedent: 4 :start-after: [START howto_operator_create_serverless_index] diff --git a/docs/apache-airflow-providers-postgres/changelog.rst b/docs/apache-airflow-providers-postgres/changelog.rst index f82d2144e7b0a..d2d34f24db829 100644 --- a/docs/apache-airflow-providers-postgres/changelog.rst +++ b/docs/apache-airflow-providers-postgres/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/postgres/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/postgres/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-postgres/index.rst b/docs/apache-airflow-providers-postgres/index.rst index 6fbd69ba1c5aa..2864276f37893 100644 --- a/docs/apache-airflow-providers-postgres/index.rst +++ b/docs/apache-airflow-providers-postgres/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/postgres/index> + System Tests <_api/tests/system/postgres/index> .. 
toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-postgres/operators/postgres_operator_howto_guide.rst b/docs/apache-airflow-providers-postgres/operators/postgres_operator_howto_guide.rst index 09402178aa057..d1b20dcea172f 100644 --- a/docs/apache-airflow-providers-postgres/operators/postgres_operator_howto_guide.rst +++ b/docs/apache-airflow-providers-postgres/operators/postgres_operator_howto_guide.rst @@ -45,7 +45,7 @@ Creating a Postgres database table The code snippets below are based on Airflow-2.0 -.. exampleinclude:: /../../tests/system/providers/postgres/example_postgres.py +.. exampleinclude:: /../../providers/tests/system/postgres/example_postgres.py :language: python :start-after: [START postgres_sql_execute_query_operator_howto_guide] :end-before: [END postgres_sql_execute_query_operator_howto_guide_create_pet_table] @@ -181,7 +181,7 @@ SQLExecuteQueryOperator provides ``hook_params`` attribute that allows you to pa You can pass ``options`` argument this way so that you specify `command-line options `_ sent to the server at connection start. -.. exampleinclude:: /../../tests/system/providers/postgres/example_postgres.py +.. exampleinclude:: /../../providers/tests/system/postgres/example_postgres.py :language: python :start-after: [START postgres_sql_execute_query_operator_howto_guide_get_birth_date] :end-before: [END postgres_sql_execute_query_operator_howto_guide_get_birth_date] @@ -192,7 +192,7 @@ The complete Postgres Operator DAG When we put everything together, our DAG should look like this: -.. exampleinclude:: /../../tests/system/providers/postgres/example_postgres.py +.. exampleinclude:: /../../providers/tests/system/postgres/example_postgres.py :language: python :start-after: [START postgres_sql_execute_query_operator_howto_guide] :end-before: [END postgres_sql_execute_query_operator_howto_guide] diff --git a/docs/apache-airflow-providers-presto/changelog.rst b/docs/apache-airflow-providers-presto/changelog.rst index e430ef542f538..7cd5d865d7c23 100644 --- a/docs/apache-airflow-providers-presto/changelog.rst +++ b/docs/apache-airflow-providers-presto/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/presto/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/presto/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-presto/index.rst b/docs/apache-airflow-providers-presto/index.rst index 9d814fde8d915..a67acce0f81bc 100644 --- a/docs/apache-airflow-providers-presto/index.rst +++ b/docs/apache-airflow-providers-presto/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/presto/index> + System Tests <_api/tests/system/presto/index> .. 
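To illustrate the ``hook_params``/``options`` mechanism mentioned above, a sketch that sets a server-side statement timeout at connection start; the connection id, table, and parameter values are placeholders.

.. code-block:: python

    from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator

    # hook_params are forwarded to the Postgres hook; "options" becomes
    # command-line options sent to the server when the connection opens.
    get_birth_date = SQLExecuteQueryOperator(
        task_id="get_birth_date",
        conn_id="postgres_default",  # assumed connection id
        sql="SELECT * FROM pet WHERE birth_date BETWEEN SYMMETRIC %(begin_date)s AND %(end_date)s",
        parameters={"begin_date": "2020-01-01", "end_date": "2020-12-31"},
        hook_params={"options": "-c statement_timeout=3000ms"},
    )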
toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-presto/operators/transfer/gcs_to_presto.rst b/docs/apache-airflow-providers-presto/operators/transfer/gcs_to_presto.rst index de717cdaee8d2..d85844956039b 100644 --- a/docs/apache-airflow-providers-presto/operators/transfer/gcs_to_presto.rst +++ b/docs/apache-airflow-providers-presto/operators/transfer/gcs_to_presto.rst @@ -39,7 +39,7 @@ This operator assumes that CSV does not have headers and the data is correspondi pre-existing presto table. Optionally, you can provide schema as tuple/list of strings or as a path to a JSON file in the same bucket as the CSV file. -.. exampleinclude:: /../../tests/system/providers/presto/example_gcs_to_presto.py +.. exampleinclude:: /../../providers/tests/system/presto/example_gcs_to_presto.py :language: python :dedent: 4 :start-after: [START gcs_csv_to_presto_table] diff --git a/docs/apache-airflow-providers-qdrant/changelog.rst b/docs/apache-airflow-providers-qdrant/changelog.rst index 56dd0a8b55a8e..b6079158bc81c 100644 --- a/docs/apache-airflow-providers-qdrant/changelog.rst +++ b/docs/apache-airflow-providers-qdrant/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/qdrant/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/qdrant/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-qdrant/index.rst b/docs/apache-airflow-providers-qdrant/index.rst index 774dbaab014a1..32ce6934c872d 100644 --- a/docs/apache-airflow-providers-qdrant/index.rst +++ b/docs/apache-airflow-providers-qdrant/index.rst @@ -51,7 +51,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/qdrant/index> + System Tests <_api/tests/system/qdrant/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-qdrant/operators/qdrant.rst b/docs/apache-airflow-providers-qdrant/operators/qdrant.rst index 22d82d5958f12..5e7bbda10a645 100644 --- a/docs/apache-airflow-providers-qdrant/operators/qdrant.rst +++ b/docs/apache-airflow-providers-qdrant/operators/qdrant.rst @@ -33,7 +33,7 @@ the original text corresponding to the vectors that could be ingested into the d An example using the operator in this way: -.. exampleinclude:: /../../tests/system/providers/qdrant/example_dag_qdrant.py +.. exampleinclude:: /../../providers/tests/system/qdrant/example_dag_qdrant.py :language: python :dedent: 4 :start-after: [START howto_operator_qdrant_ingest] diff --git a/docs/apache-airflow-providers-redis/changelog.rst b/docs/apache-airflow-providers-redis/changelog.rst index 2fe7da2668e99..fa694a4a9b8d6 100644 --- a/docs/apache-airflow-providers-redis/changelog.rst +++ b/docs/apache-airflow-providers-redis/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/redis/CHANGELOG.rst +.. 
include:: ../../providers/src/airflow/providers/redis/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-redis/index.rst b/docs/apache-airflow-providers-redis/index.rst index 1231aa2df3e98..fe9b374f68833 100644 --- a/docs/apache-airflow-providers-redis/index.rst +++ b/docs/apache-airflow-providers-redis/index.rst @@ -49,7 +49,7 @@ :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources @@ -58,7 +58,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/redis/index> + System Tests <_api/tests/system/redis/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-salesforce/changelog.rst b/docs/apache-airflow-providers-salesforce/changelog.rst index 192eeeb3d5ed6..ba7063ba2d814 100644 --- a/docs/apache-airflow-providers-salesforce/changelog.rst +++ b/docs/apache-airflow-providers-salesforce/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/salesforce/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/salesforce/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-salesforce/index.rst b/docs/apache-airflow-providers-salesforce/index.rst index cf4ea7a539bb5..610be66bbc277 100644 --- a/docs/apache-airflow-providers-salesforce/index.rst +++ b/docs/apache-airflow-providers-salesforce/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/salesforce/index> + System Tests <_api/tests/system/salesforce/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-salesforce/operators/bulk.rst b/docs/apache-airflow-providers-salesforce/operators/bulk.rst index 037f819c3c7bd..8e1bdd8415c91 100644 --- a/docs/apache-airflow-providers-salesforce/operators/bulk.rst +++ b/docs/apache-airflow-providers-salesforce/operators/bulk.rst @@ -28,7 +28,7 @@ Using the Operator You can use this operator to access Bulk Insert API: -.. exampleinclude:: /../../tests/system/providers/salesforce/example_bulk.py +.. exampleinclude:: /../../providers/tests/system/salesforce/example_bulk.py :language: python :dedent: 4 :start-after: [START howto_salesforce_bulk_insert_operation] @@ -36,7 +36,7 @@ You can use this operator to access Bulk Insert API: You can use this operator to access Bulk Update API: -.. exampleinclude:: /../../tests/system/providers/salesforce/example_bulk.py +.. exampleinclude:: /../../providers/tests/system/salesforce/example_bulk.py :language: python :dedent: 4 :start-after: [START howto_salesforce_bulk_update_operation] @@ -44,7 +44,7 @@ You can use this operator to access Bulk Update API: You can use this operator to access Bulk Upsert API: -.. exampleinclude:: /../../tests/system/providers/salesforce/example_bulk.py +.. exampleinclude:: /../../providers/tests/system/salesforce/example_bulk.py :language: python :dedent: 4 :start-after: [START howto_salesforce_bulk_upsert_operation] @@ -52,7 +52,7 @@ You can use this operator to access Bulk Upsert API: You can use this operator to access Bulk Delete API: -.. exampleinclude:: /../../tests/system/providers/salesforce/example_bulk.py +.. 
exampleinclude:: /../../providers/tests/system/salesforce/example_bulk.py :language: python :dedent: 4 :start-after: [START howto_salesforce_bulk_delete_operation] diff --git a/docs/apache-airflow-providers-salesforce/operators/salesforce_apex_rest.rst b/docs/apache-airflow-providers-salesforce/operators/salesforce_apex_rest.rst index 00b4b4c643434..372a4efff538d 100644 --- a/docs/apache-airflow-providers-salesforce/operators/salesforce_apex_rest.rst +++ b/docs/apache-airflow-providers-salesforce/operators/salesforce_apex_rest.rst @@ -30,7 +30,7 @@ You can also use this library to call custom Apex methods: This would call the endpoint ``https://.salesforce.com/services/apexrest/User/Activity`` with ``payload`` as the body content encoded with ``json.dumps`` -.. exampleinclude:: /../../tests/system/providers/salesforce/example_salesforce_apex_rest.py +.. exampleinclude:: /../../providers/tests/system/salesforce/example_salesforce_apex_rest.py :language: python :start-after: [START howto_salesforce_apex_rest_operator] :end-before: [END howto_salesforce_apex_rest_operator] diff --git a/docs/apache-airflow-providers-samba/changelog.rst b/docs/apache-airflow-providers-samba/changelog.rst index e22abbadb41c1..8bcfbb1b1f012 100644 --- a/docs/apache-airflow-providers-samba/changelog.rst +++ b/docs/apache-airflow-providers-samba/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/samba/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/samba/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-samba/index.rst b/docs/apache-airflow-providers-samba/index.rst index a252f74481e84..11ac45f8c1d97 100644 --- a/docs/apache-airflow-providers-samba/index.rst +++ b/docs/apache-airflow-providers-samba/index.rst @@ -57,7 +57,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/samba/index> + System Tests <_api/tests/system/samba/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-samba/transfer/gcs_to_samba.rst b/docs/apache-airflow-providers-samba/transfer/gcs_to_samba.rst index c63f468bd665e..86f6de1926ef3 100644 --- a/docs/apache-airflow-providers-samba/transfer/gcs_to_samba.rst +++ b/docs/apache-airflow-providers-samba/transfer/gcs_to_samba.rst @@ -42,7 +42,7 @@ Copying a single file The following Operator copies a single file. -.. exampleinclude:: /../../tests/system/providers/samba/example_gcs_to_samba.py +.. exampleinclude:: /../../providers/tests/system/samba/example_gcs_to_samba.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_samba_copy_single_file] @@ -55,7 +55,7 @@ To move the file use the ``move_object`` parameter. Once the file is copied to S the original file from the Google Storage is deleted. The ``destination_path`` parameter defines the full path of the file on the Samba server. -.. exampleinclude:: /../../tests/system/providers/samba/example_gcs_to_samba.py +.. exampleinclude:: /../../providers/tests/system/samba/example_gcs_to_samba.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_samba_move_single_file_destination] @@ -67,7 +67,7 @@ Copying a directory Use the ``wildcard`` in ``source_path`` parameter to copy a directory. -.. exampleinclude:: /../../tests/system/providers/samba/example_gcs_to_samba.py +.. 
exampleinclude:: /../../providers/tests/system/samba/example_gcs_to_samba.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_samba_copy_directory] @@ -79,7 +79,7 @@ Moving specific files Use the ``wildcard`` in ``source_path`` parameter to move the specific files. The ``destination_path`` defines the path that is prefixed to all copied files. -.. exampleinclude:: /../../tests/system/providers/samba/example_gcs_to_samba.py +.. exampleinclude:: /../../providers/tests/system/samba/example_gcs_to_samba.py :language: python :dedent: 4 :start-after: [START howto_operator_gcs_to_samba_move_specific_files] diff --git a/docs/apache-airflow-providers-segment/changelog.rst b/docs/apache-airflow-providers-segment/changelog.rst index 9a9e27c111972..b8eafd739d1fa 100644 --- a/docs/apache-airflow-providers-segment/changelog.rst +++ b/docs/apache-airflow-providers-segment/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/segment/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/segment/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-sendgrid/changelog.rst b/docs/apache-airflow-providers-sendgrid/changelog.rst index 43ddc029119e1..913b7477b13a2 100644 --- a/docs/apache-airflow-providers-sendgrid/changelog.rst +++ b/docs/apache-airflow-providers-sendgrid/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/sendgrid/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/sendgrid/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-sftp/changelog.rst b/docs/apache-airflow-providers-sftp/changelog.rst index 2ed184d07da7c..3915d213a711c 100644 --- a/docs/apache-airflow-providers-sftp/changelog.rst +++ b/docs/apache-airflow-providers-sftp/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/sftp/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/sftp/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-sftp/index.rst b/docs/apache-airflow-providers-sftp/index.rst index 9acb9773fadc1..a69f41a678800 100644 --- a/docs/apache-airflow-providers-sftp/index.rst +++ b/docs/apache-airflow-providers-sftp/index.rst @@ -51,7 +51,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/sftp/index> + System Tests <_api/tests/system/sftp/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! diff --git a/docs/apache-airflow-providers-sftp/sensors/sftp_sensor.rst b/docs/apache-airflow-providers-sftp/sensors/sftp_sensor.rst index b99bc36672d36..32f2a20696a73 100644 --- a/docs/apache-airflow-providers-sftp/sensors/sftp_sensor.rst +++ b/docs/apache-airflow-providers-sftp/sensors/sftp_sensor.rst @@ -21,7 +21,7 @@ SFTP Sensor Looks for either a specific file or files with a specific pattern in a server using SFTP protocol. To get more information about this sensor visit :class:`~airflow.providers.sftp.sensors.sftp.SFTPSensor` -.. exampleinclude:: /../../tests/system/providers/sftp/example_sftp_sensor.py +.. 
exampleinclude:: /../../providers/tests/system/sftp/example_sftp_sensor.py :language: python :dedent: 4 :start-after: [START howto_operator_sftp_sensor] @@ -39,7 +39,7 @@ op_kwargs (optional) Whatever returned by the python callable is put into XCom. -.. exampleinclude:: /../../tests/system/providers/sftp/example_sftp_sensor.py +.. exampleinclude:: /../../providers/tests/system/sftp/example_sftp_sensor.py :language: python :dedent: 4 :start-after: [START howto_operator_sftp_sensor_decorator] @@ -47,7 +47,7 @@ Whatever returned by the python callable is put into XCom. Checks for the existence of a file on an SFTP server in the deferrable mode: -.. exampleinclude:: /../../tests/system/providers/sftp/example_sftp_sensor.py +.. exampleinclude:: /../../providers/tests/system/sftp/example_sftp_sensor.py :language: python :dedent: 4 :start-after: [START howto_sensor_sftp_deferrable] diff --git a/docs/apache-airflow-providers-singularity/changelog.rst b/docs/apache-airflow-providers-singularity/changelog.rst index 1da16678d8e1b..eaa3226835fd6 100644 --- a/docs/apache-airflow-providers-singularity/changelog.rst +++ b/docs/apache-airflow-providers-singularity/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/singularity/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/singularity/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-singularity/index.rst b/docs/apache-airflow-providers-singularity/index.rst index 3dd7c559bc833..656a7a64a27a6 100644 --- a/docs/apache-airflow-providers-singularity/index.rst +++ b/docs/apache-airflow-providers-singularity/index.rst @@ -41,14 +41,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/singularity/index> + System Tests <_api/tests/system/singularity/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-slack/changelog.rst b/docs/apache-airflow-providers-slack/changelog.rst index ec3a2f20d6577..75fb0c94dfe75 100644 --- a/docs/apache-airflow-providers-slack/changelog.rst +++ b/docs/apache-airflow-providers-slack/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/slack/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/slack/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-slack/index.rst b/docs/apache-airflow-providers-slack/index.rst index f9e6e02e9fd19..7fca57690168d 100644 --- a/docs/apache-airflow-providers-slack/index.rst +++ b/docs/apache-airflow-providers-slack/index.rst @@ -34,7 +34,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/slack/index> + System Tests <_api/tests/system/slack/index> .. toctree:: :hidden: @@ -51,7 +51,7 @@ :caption: References Python API <_api/airflow/providers/slack/index> - Example DAGs + Example DAGs .. 
toctree:: :hidden: diff --git a/docs/apache-airflow-providers-slack/operators/slack_api.rst b/docs/apache-airflow-providers-slack/operators/slack_api.rst index 6fafa46f79418..7ab98f6b7eb81 100644 --- a/docs/apache-airflow-providers-slack/operators/slack_api.rst +++ b/docs/apache-airflow-providers-slack/operators/slack_api.rst @@ -34,7 +34,7 @@ Using the Operator You could send simple text message -.. exampleinclude:: /../../tests/system/providers/slack/example_slack.py +.. exampleinclude:: /../../providers/tests/system/slack/example_slack.py :language: python :dedent: 4 :start-after: [START slack_api_post_operator_text_howto_guide] @@ -43,7 +43,7 @@ You could send simple text message Or you could use `Block Kit `_ for create app layouts -.. exampleinclude:: /../../tests/system/providers/slack/example_slack.py +.. exampleinclude:: /../../providers/tests/system/slack/example_slack.py :language: python :dedent: 4 :start-after: [START slack_api_post_operator_blocks_howto_guide] @@ -81,7 +81,7 @@ Using the Operator You could send file attachment by specifying file path -.. exampleinclude:: /../../tests/system/providers/slack/example_slack.py +.. exampleinclude:: /../../providers/tests/system/slack/example_slack.py :language: python :start-after: [START slack_api_file_operator_howto_guide] :end-before: [END slack_api_file_operator_howto_guide] @@ -89,7 +89,7 @@ You could send file attachment by specifying file path Or by directly providing file contents -.. exampleinclude:: /../../tests/system/providers/slack/example_slack.py +.. exampleinclude:: /../../providers/tests/system/slack/example_slack.py :language: python :start-after: [START slack_api_file_operator_content_howto_guide] :end-before: [END slack_api_file_operator_content_howto_guide] diff --git a/docs/apache-airflow-providers-slack/operators/slack_webhook.rst b/docs/apache-airflow-providers-slack/operators/slack_webhook.rst index aded2a76bb7c6..0b599312ba61c 100644 --- a/docs/apache-airflow-providers-slack/operators/slack_webhook.rst +++ b/docs/apache-airflow-providers-slack/operators/slack_webhook.rst @@ -30,7 +30,7 @@ Using the Operator You could send simple text message -.. exampleinclude:: /../../tests/system/providers/slack/example_slack_webhook.py +.. exampleinclude:: /../../providers/tests/system/slack/example_slack_webhook.py :language: python :dedent: 4 :start-after: [START slack_webhook_operator_text_howto_guide] @@ -39,7 +39,7 @@ You could send simple text message Or you could use `Block Kit `_ for create app layouts -.. exampleinclude:: /../../tests/system/providers/slack/example_slack_webhook.py +.. exampleinclude:: /../../providers/tests/system/slack/example_slack_webhook.py :language: python :dedent: 4 :start-after: [START slack_webhook_operator_blocks_howto_guide] diff --git a/docs/apache-airflow-providers-slack/operators/sql_to_slack.rst b/docs/apache-airflow-providers-slack/operators/sql_to_slack.rst index 3182fd7e0b785..5da6a2bd593d5 100644 --- a/docs/apache-airflow-providers-slack/operators/sql_to_slack.rst +++ b/docs/apache-airflow-providers-slack/operators/sql_to_slack.rst @@ -51,7 +51,7 @@ This operator will execute a custom query in the provided SQL connection and pub An example usage of the SqlToSlackApiFileOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/slack/example_sql_to_slack.py +.. 
exampleinclude:: /../../providers/tests/system/slack/example_sql_to_slack.py :language: python :dedent: 4 :start-after: [START howto_operator_sql_to_slack_api_file] diff --git a/docs/apache-airflow-providers-slack/operators/sql_to_slack_webhook.rst b/docs/apache-airflow-providers-slack/operators/sql_to_slack_webhook.rst index 3ad72e2a9ad77..aa46fe7b03509 100644 --- a/docs/apache-airflow-providers-slack/operators/sql_to_slack_webhook.rst +++ b/docs/apache-airflow-providers-slack/operators/sql_to_slack_webhook.rst @@ -31,7 +31,7 @@ and contain the resulting dataset (e.g. ASCII formatted dataframe). An example usage of the SqlToSlackWebhookOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/slack/example_sql_to_slack_webhook.py +.. exampleinclude:: /../../providers/tests/system/slack/example_sql_to_slack_webhook.py :language: python :dedent: 4 :start-after: [START howto_operator_sql_to_slack_webhook] diff --git a/docs/apache-airflow-providers-smtp/changelog.rst b/docs/apache-airflow-providers-smtp/changelog.rst index 105ae30115633..3ada8a5cf420d 100644 --- a/docs/apache-airflow-providers-smtp/changelog.rst +++ b/docs/apache-airflow-providers-smtp/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/smtp/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/smtp/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-snowflake/changelog.rst b/docs/apache-airflow-providers-snowflake/changelog.rst index 6ccd3e61dc37a..932a260df7188 100644 --- a/docs/apache-airflow-providers-snowflake/changelog.rst +++ b/docs/apache-airflow-providers-snowflake/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/snowflake/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/snowflake/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-snowflake/decorators/snowpark.rst b/docs/apache-airflow-providers-snowflake/decorators/snowpark.rst index 09be01e3efc8f..947cfec52b5e4 100644 --- a/docs/apache-airflow-providers-snowflake/decorators/snowpark.rst +++ b/docs/apache-airflow-providers-snowflake/decorators/snowpark.rst @@ -51,7 +51,7 @@ Use the ``snowflake_conn_id`` argument to specify connection used. If not specif An example usage of the ``@task.snowpark`` is as follows: -.. exampleinclude:: /../../tests/system/providers/snowflake/example_snowpark_decorator.py +.. exampleinclude:: /../../providers/tests/system/snowflake/example_snowpark_decorator.py :language: python :start-after: [START howto_decorator_snowpark] :end-before: [END howto_decorator_snowpark] diff --git a/docs/apache-airflow-providers-snowflake/index.rst b/docs/apache-airflow-providers-snowflake/index.rst index c9746f7806943..c9f3b8f8872ef 100644 --- a/docs/apache-airflow-providers-snowflake/index.rst +++ b/docs/apache-airflow-providers-snowflake/index.rst @@ -50,14 +50,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/snowflake/index> + System Tests <_api/tests/system/snowflake/index> .. 
toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-snowflake/operators/copy_into_snowflake.rst b/docs/apache-airflow-providers-snowflake/operators/copy_into_snowflake.rst index 49fca623f2f9e..30236121abbb3 100644 --- a/docs/apache-airflow-providers-snowflake/operators/copy_into_snowflake.rst +++ b/docs/apache-airflow-providers-snowflake/operators/copy_into_snowflake.rst @@ -43,7 +43,7 @@ a file format (see `docs + System Tests <_api/tests/system/sqlite/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-sqlite/operators.rst b/docs/apache-airflow-providers-sqlite/operators.rst index 472ec71452499..5b113162ae4c3 100644 --- a/docs/apache-airflow-providers-sqlite/operators.rst +++ b/docs/apache-airflow-providers-sqlite/operators.rst @@ -45,14 +45,14 @@ the connection metadata is structured as follows: An example usage of the SQLExecuteQueryOperator to connect to Sqlite is as follows: -.. exampleinclude:: /../../tests/system/providers/sqlite/example_sqlite.py +.. exampleinclude:: /../../providers/tests/system/sqlite/example_sqlite.py :language: python :start-after: [START howto_operator_sqlite] :end-before: [END howto_operator_sqlite] Furthermore, you can use an external file to execute the SQL commands. Script folder must be at the same level as DAG.py file. -.. exampleinclude:: /../../tests/system/providers/sqlite/example_sqlite.py +.. exampleinclude:: /../../providers/tests/system/sqlite/example_sqlite.py :language: python :start-after: [START howto_operator_sqlite_external_file] :end-before: [END howto_operator_sqlite_external_file] diff --git a/docs/apache-airflow-providers-ssh/changelog.rst b/docs/apache-airflow-providers-ssh/changelog.rst index b2212a396cc68..33bcb1f1b4086 100644 --- a/docs/apache-airflow-providers-ssh/changelog.rst +++ b/docs/apache-airflow-providers-ssh/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/ssh/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/ssh/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-standard/changelog.rst b/docs/apache-airflow-providers-standard/changelog.rst index 3d9d5b25f5540..2e1b328f5e5e2 100644 --- a/docs/apache-airflow-providers-standard/changelog.rst +++ b/docs/apache-airflow-providers-standard/changelog.rst @@ -15,4 +15,4 @@ specific language governing permissions and limitations under the License. -.. include:: ../../airflow/providers/standard/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/standard/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-tableau/changelog.rst b/docs/apache-airflow-providers-tableau/changelog.rst index 527d56d2c4242..7716b6ab1bf64 100644 --- a/docs/apache-airflow-providers-tableau/changelog.rst +++ b/docs/apache-airflow-providers-tableau/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/tableau/CHANGELOG.rst +.. 
include:: ../../providers/src/airflow/providers/tableau/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-tableau/index.rst b/docs/apache-airflow-providers-tableau/index.rst index 0a4789db73124..e0f140e0fae4e 100644 --- a/docs/apache-airflow-providers-tableau/index.rst +++ b/docs/apache-airflow-providers-tableau/index.rst @@ -43,14 +43,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/tableau/index> + System Tests <_api/tests/system/tableau/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-tableau/operators.rst b/docs/apache-airflow-providers-tableau/operators.rst index ba250e1be41fe..7452d7899f4dc 100644 --- a/docs/apache-airflow-providers-tableau/operators.rst +++ b/docs/apache-airflow-providers-tableau/operators.rst @@ -68,7 +68,7 @@ Using the Operator An example usage of the TableauOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/tableau/example_tableau.py +.. exampleinclude:: /../../providers/tests/system/tableau/example_tableau.py :language: python :start-after: [START howto_operator_tableau] :end-before: [END howto_operator_tableau] diff --git a/docs/apache-airflow-providers-telegram/changelog.rst b/docs/apache-airflow-providers-telegram/changelog.rst index 704e8de2f6f26..073d54c1b5e83 100644 --- a/docs/apache-airflow-providers-telegram/changelog.rst +++ b/docs/apache-airflow-providers-telegram/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/telegram/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/telegram/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-telegram/index.rst b/docs/apache-airflow-providers-telegram/index.rst index 5c38d229986b0..58efd772bb28f 100644 --- a/docs/apache-airflow-providers-telegram/index.rst +++ b/docs/apache-airflow-providers-telegram/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/telegram/index> + System Tests <_api/tests/system/telegram/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-telegram/operators.rst b/docs/apache-airflow-providers-telegram/operators.rst index 0f3ad86b19d09..de693f54001e4 100644 --- a/docs/apache-airflow-providers-telegram/operators.rst +++ b/docs/apache-airflow-providers-telegram/operators.rst @@ -48,7 +48,7 @@ the connection metadata is structured as follows: An example usage of the TelegramOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/telegram/example_telegram.py +.. exampleinclude:: /../../providers/tests/system/telegram/example_telegram.py :language: python :start-after: [START howto_operator_telegram] :end-before: [END howto_operator_telegram] diff --git a/docs/apache-airflow-providers-teradata/changelog.rst b/docs/apache-airflow-providers-teradata/changelog.rst index 90c80663b40d8..577e6333e2053 100644 --- a/docs/apache-airflow-providers-teradata/changelog.rst +++ b/docs/apache-airflow-providers-teradata/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. 
include:: ../../airflow/providers/teradata/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/teradata/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-teradata/index.rst b/docs/apache-airflow-providers-teradata/index.rst index 44310a10b1e70..ecf00232484d1 100644 --- a/docs/apache-airflow-providers-teradata/index.rst +++ b/docs/apache-airflow-providers-teradata/index.rst @@ -49,7 +49,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/teradata/index> + System Tests <_api/tests/system/teradata/index> System Tests Dashboard .. toctree:: diff --git a/docs/apache-airflow-providers-teradata/operators/azure_blob_to_teradata.rst b/docs/apache-airflow-providers-teradata/operators/azure_blob_to_teradata.rst index 194eabd0cd001..4cc3da46a902f 100644 --- a/docs/apache-airflow-providers-teradata/operators/azure_blob_to_teradata.rst +++ b/docs/apache-airflow-providers-teradata/operators/azure_blob_to_teradata.rst @@ -60,7 +60,7 @@ Transferring data from public Azure Blob Storage to Teradata An example usage of the AzureBlobStorageToTeradataOperator to transfer CSV data format from public Azure Blob Storage to teradata table is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py :language: python :start-after: [START azure_blob__to_teradata_transfer_operator_howto_guide_transfer_data_public_blob_to_teradata_csv] :end-before: [END azure_blob__to_teradata_transfer_operator_howto_guide_transfer_data_public_blob_to_teradata_csv] @@ -71,7 +71,7 @@ Transferring data from private Azure Blob Storage to Teradata with AWS connectio An example usage of the AzureBlobStorageToTeradataOperator to transfer CSV data format from private S3 object store to teradata with AWS credentials defined as AWS connection: -.. exampleinclude:: /../../tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py :language: python :start-after: [START azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_access_blob_to_teradata_csv] :end-before: [END azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_access_blob_to_teradata_csv] @@ -85,7 +85,7 @@ database object should exists in Teradata database to use it in transferring dat An example usage of the AzureBlobStorageToTeradataOperator to transfer CSV data format from private S3 object store to teradata with Authorization database object defined in Teradata. -.. exampleinclude:: /../../tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py :language: python :start-after: [START azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_authorization_blob_to_teradata_csv] :end-before: [END azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_authorization_blob_to_teradata_csv] @@ -96,7 +96,7 @@ Transferring data in CSV format from Azure Blob Storage to Teradata An example usage of the AzureBlobStorageToTeradataOperator to transfer CSV data format from Azure Blob Storage to teradata table is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py +.. 
exampleinclude:: /../../providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py :language: python :start-after: [START azure_blob__to_teradata_transfer_operator_howto_guide_transfer_data_public_blob_to_teradata_csv] :end-before: [END azure_blob__to_teradata_transfer_operator_howto_guide_transfer_data_public_blob_to_teradata_csv] @@ -107,7 +107,7 @@ Transferring data in JSON format from Azure Blob Storage to Teradata An example usage of the AzureBlobStorageToTeradataOperator to transfer JSON data format from Azure Blob Storage to teradata table is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py :language: python :start-after: [START azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_json] :end-before: [END azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_json] @@ -118,7 +118,7 @@ Transferring data in PARQUET format from Azure Blob Storage to Teradata An example usage of the AzureBlobStorageToTeradataOperator to transfer PARQUET data format from Azure Blob Storage to teradata table is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py :language: python :start-after: [START azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_parquet] :end-before: [END azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_parquet] @@ -128,7 +128,7 @@ The complete ``AzureBlobStorageToTeradataOperator`` Operator DAG When we put everything together, our DAG should look like this: -.. exampleinclude:: /../../tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py :language: python :start-after: [START azure_blob_to_teradata_transfer_operator_howto_guide] :end-before: [END azure_blob_to_teradata_transfer_operator_howto_guide] diff --git a/docs/apache-airflow-providers-teradata/operators/compute_cluster.rst b/docs/apache-airflow-providers-teradata/operators/compute_cluster.rst index ceaf27ee74676..507aac5c90ac6 100644 --- a/docs/apache-airflow-providers-teradata/operators/compute_cluster.rst +++ b/docs/apache-airflow-providers-teradata/operators/compute_cluster.rst @@ -33,7 +33,7 @@ to provision the new Compute Cluster in Teradata Vantage Cloud Lake. An example usage of the TeradataComputeClusterProvisionOperator to provision the new Compute Cluster in Teradata Vantage Cloud Lake is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_compute_cluster.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata_compute_cluster.py :language: python :start-after: [START teradata_vantage_lake_compute_cluster_provision_howto_guide] :end-before: [END teradata_vantage_lake_compute_cluster_provision_howto_guide] @@ -56,7 +56,7 @@ to decommission the specified Teradata Vantage Cloud Lake Compute Cluster. An example usage of the TeradataComputeClusterDecommissionOperator to decommission the specified Teradata Vantage Cloud Lake Compute Cluster is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_compute_cluster.py +.. 
exampleinclude:: /../../providers/tests/system/teradata/example_teradata_compute_cluster.py :language: python :start-after: [START teradata_vantage_lake_compute_cluster_decommission_howto_guide] :end-before: [END teradata_vantage_lake_compute_cluster_decommission_howto_guide] @@ -79,7 +79,7 @@ to start the specified Compute Cluster in Teradata Vantage Cloud Lake. An example usage of the TeradataComputeClusterSuspendOperator to start the specified Compute Cluster in Teradata Vantage Cloud Lake is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_compute_cluster.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata_compute_cluster.py :language: python :start-after: [START teradata_vantage_lake_compute_cluster_resume_howto_guide] :end-before: [END teradata_vantage_lake_compute_cluster_resume_howto_guide] @@ -101,7 +101,7 @@ to suspend the specified Compute Cluster in Teradata Vantage Cloud Lake. An example usage of the TeradataComputeClusterSuspendOperator to suspend the specified Compute Cluster in Teradata Vantage Cloud Lake is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_compute_cluster.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata_compute_cluster.py :language: python :start-after: [START teradata_vantage_lake_compute_cluster_suspend_howto_guide] :end-before: [END teradata_vantage_lake_compute_cluster_suspend_howto_guide] diff --git a/docs/apache-airflow-providers-teradata/operators/s3_to_teradata.rst b/docs/apache-airflow-providers-teradata/operators/s3_to_teradata.rst index da52e2841bfa3..e24887126a2a5 100644 --- a/docs/apache-airflow-providers-teradata/operators/s3_to_teradata.rst +++ b/docs/apache-airflow-providers-teradata/operators/s3_to_teradata.rst @@ -45,7 +45,7 @@ Transferring data in CSV format from S3 to Teradata An example usage of the S3ToTeradataOperator to transfer CSV data format from S3 to teradata table is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_s3_to_teradata_transfer.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_s3_to_teradata_transfer.py :language: python :start-after: [START s3_to_teradata_transfer_operator_howto_guide_transfer_data_public_s3_to_teradata_csv] :end-before: [END s3_to_teradata_transfer_operator_howto_guide_transfer_data_public_s3_to_teradata_csv] @@ -55,7 +55,7 @@ Transferring data in JSON format from S3 to Teradata An example usage of the S3ToTeradataOperator to transfer JSON data format from S3 to teradata table is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_s3_to_teradata_transfer.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_s3_to_teradata_transfer.py :language: python :start-after: [START s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_json] :end-before: [END s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_json] @@ -65,7 +65,7 @@ Transferring data in PARQUET format from S3 to Teradata An example usage of the S3ToTeradataOperator to transfer PARQUET data format from S3 to teradata table is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_s3_to_teradata_transfer.py +.. 
exampleinclude:: /../../providers/tests/system/teradata/example_s3_to_teradata_transfer.py :language: python :start-after: [START s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_parquet] :end-before: [END s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_parquet] @@ -75,7 +75,7 @@ The complete ``S3ToTeradataOperator`` Operator DAG When we put everything together, our DAG should look like this: -.. exampleinclude:: /../../tests/system/providers/teradata/example_s3_to_teradata_transfer.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_s3_to_teradata_transfer.py :language: python :start-after: [START s3_to_teradata_transfer_operator_howto_guide] :end-before: [END s3_to_teradata_transfer_operator_howto_guide] diff --git a/docs/apache-airflow-providers-teradata/operators/teradata.rst b/docs/apache-airflow-providers-teradata/operators/teradata.rst index 6fd7d371a7b09..78e2e1b12e800 100644 --- a/docs/apache-airflow-providers-teradata/operators/teradata.rst +++ b/docs/apache-airflow-providers-teradata/operators/teradata.rst @@ -33,7 +33,7 @@ Creating a Teradata database table An example usage of the TeradataOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata.py :language: python :dedent: 4 :start-after: [START teradata_operator_howto_guide_create_table] @@ -42,7 +42,7 @@ An example usage of the TeradataOperator is as follows: You can also use an external file to execute the SQL commands. External file must be at the same level as DAG.py file. This way you can easily maintain the SQL queries separated from the code. -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata.py :language: python :start-after: [START teradata_operator_howto_guide_create_table_from_external_file] :end-before: [END teradata_operator_howto_guide_create_table_from_external_file] @@ -63,7 +63,7 @@ Inserting data into a Teradata database table --------------------------------------------- We can then create a TeradataOperator task that populate the ``Users`` table. -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata.py :language: python :start-after: [START teradata_operator_howto_guide_populate_table] :end-before: [END teradata_operator_howto_guide_populate_table] @@ -74,7 +74,7 @@ Fetching records from your Teradata database table Fetching records from your Teradata database table can be as simple as: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata.py :language: python :start-after: [START teradata_operator_howto_guide_get_all_countries] :end-before: [END teradata_operator_howto_guide_get_all_countries] @@ -88,7 +88,7 @@ SQL requests during runtime. To find the countries in Asian continent: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata.py +.. 
exampleinclude:: /../../providers/tests/system/teradata/example_teradata.py :language: python :start-after: [START teradata_operator_howto_guide_params_passing_get_query] :end-before: [END teradata_operator_howto_guide_params_passing_get_query] @@ -99,7 +99,7 @@ Dropping a Teradata database table We can then create a TeradataOperator task that drops the ``Users`` table. -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata.py :language: python :start-after: [START teradata_operator_howto_guide_drop_users_table] :end-before: [END teradata_operator_howto_guide_drop_users_table] @@ -109,7 +109,7 @@ The complete Teradata Operator DAG When we put everything together, our DAG should look like this: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata.py :language: python :start-after: [START teradata_operator_howto_guide] :end-before: [END teradata_operator_howto_guide] @@ -152,21 +152,21 @@ This stored procedure can be invoked using One approach involves passing parameters positionally as a list, with output parameters specified as Python data types: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_call_sp.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata_call_sp.py :language: python :start-after: [START howto_call_teradata_stored_procedure_operator_with_types] :end-before: [END howto_call_teradata_stored_procedure_operator_with_types] Alternatively, parameters can be passed positionally as a list, with output parameters designated as placeholders: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_call_sp.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata_call_sp.py :language: python :start-after: [START howto_call_teradata_stored_procedure_operator_with_place_holder] :end-before: [END howto_call_teradata_stored_procedure_operator_with_place_holder] Another method entails passing parameters positionally as a dictionary: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_call_sp.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata_call_sp.py :language: python :start-after: [START howto_call_teradata_stored_procedure_operator_with_dict_input] :end-before: [END howto_call_teradata_stored_procedure_operator_with_dict_input] @@ -186,7 +186,7 @@ This stored procedure yields a singular timestamp argument, out_timestamp, and i :class:`~airflow.providers.teradata.operators.teradata.TeradataStoredProcedureOperator` with parameters passed positionally as a list: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_call_sp.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata_call_sp.py :language: python :start-after: [START howto_call_teradata_stored_procedure_operator_timestamp] :end-before: [END howto_call_teradata_stored_procedure_operator_timestamp] @@ -213,7 +213,7 @@ This stored procedure can be invoked using :class:`~airflow.providers.teradata.operators.teradata.TeradataStoredProcedureOperator` with parameters passed positionally as a list: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_call_sp.py +.. 
exampleinclude:: /../../providers/tests/system/teradata/example_teradata_call_sp.py :language: python :start-after: [START howto_teradata_stored_procedure_operator_with_in_out_dynamic_result] :end-before: [END howto_teradata_stored_procedure_operator_with_in_out_dynamic_result] @@ -223,7 +223,7 @@ The complete TeradataStoredProcedureOperator DAG When we put everything together, our DAG should look like this: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_call_sp.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata_call_sp.py :language: python :start-after: [START howto_teradata_operator_for_sp] :end-before: [END howto_teradata_operator_for_sp] diff --git a/docs/apache-airflow-providers-teradata/operators/teradata_to_teradata.rst b/docs/apache-airflow-providers-teradata/operators/teradata_to_teradata.rst index 6c551427f0737..ec2a414ff7d0a 100644 --- a/docs/apache-airflow-providers-teradata/operators/teradata_to_teradata.rst +++ b/docs/apache-airflow-providers-teradata/operators/teradata_to_teradata.rst @@ -32,7 +32,7 @@ To transfer data between two Teradata instances, use the An example usage of the TeradataToTeradataOperator is as follows: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata_to_teradata_transfer.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata_to_teradata_transfer.py :language: python :start-after: [START teradata_to_teradata_transfer_operator_howto_guide_transfer_data] :end-before: [END teradata_to_teradata_transfer_operator_howto_guide_transfer_data] @@ -42,7 +42,7 @@ The complete TeradataToTeradata Transfer Operator DAG When we put everything together, our DAG should look like this: -.. exampleinclude:: /../../tests/system/providers/teradata/example_teradata.py +.. exampleinclude:: /../../providers/tests/system/teradata/example_teradata.py :language: python :start-after: [START teradata_operator_howto_guide] :end-before: [END teradata_operator_howto_guide] diff --git a/docs/apache-airflow-providers-trino/changelog.rst b/docs/apache-airflow-providers-trino/changelog.rst index e29b41a315293..c3d1995dd1ea6 100644 --- a/docs/apache-airflow-providers-trino/changelog.rst +++ b/docs/apache-airflow-providers-trino/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/trino/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/trino/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-trino/index.rst b/docs/apache-airflow-providers-trino/index.rst index 9a17b5907e5db..70a8d85c9a2ea 100644 --- a/docs/apache-airflow-providers-trino/index.rst +++ b/docs/apache-airflow-providers-trino/index.rst @@ -50,14 +50,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/trino/index> + System Tests <_api/tests/system/trino/index> .. 
toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-trino/operators/transfer/gcs_to_trino.rst b/docs/apache-airflow-providers-trino/operators/transfer/gcs_to_trino.rst index 39224b3ca3d05..4d1cbca9d14d2 100644 --- a/docs/apache-airflow-providers-trino/operators/transfer/gcs_to_trino.rst +++ b/docs/apache-airflow-providers-trino/operators/transfer/gcs_to_trino.rst @@ -39,7 +39,7 @@ This operator assumes that CSV does not have headers and the data is correspondi pre-existing presto table. Optionally, you can provide schema as tuple/list of strings or as a path to a JSON file in the same bucket as the CSV file. -.. exampleinclude:: /../../tests/system/providers/trino/example_gcs_to_trino.py +.. exampleinclude:: /../../providers/tests/system/trino/example_gcs_to_trino.py :language: python :dedent: 4 :start-after: [START gcs_csv_to_trino_table] diff --git a/docs/apache-airflow-providers-trino/operators/trino.rst b/docs/apache-airflow-providers-trino/operators/trino.rst index d0e901ebed9fd..dc076c82c7c2d 100644 --- a/docs/apache-airflow-providers-trino/operators/trino.rst +++ b/docs/apache-airflow-providers-trino/operators/trino.rst @@ -34,7 +34,7 @@ Use the ``trino_conn_id`` argument to connect to your Trino instance An example usage of the SQLExecuteQueryOperator to connect to Trino is as follows: -.. exampleinclude:: /../../tests/system/providers/trino/example_trino.py +.. exampleinclude:: /../../providers/tests/system/trino/example_trino.py :language: python :start-after: [START howto_operator_trino] :end-before: [END howto_operator_trino] diff --git a/docs/apache-airflow-providers-vertica/changelog.rst b/docs/apache-airflow-providers-vertica/changelog.rst index 1f8dd4aa880e2..2c77a34914de2 100644 --- a/docs/apache-airflow-providers-vertica/changelog.rst +++ b/docs/apache-airflow-providers-vertica/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/vertica/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/vertica/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-weaviate/changelog.rst b/docs/apache-airflow-providers-weaviate/changelog.rst index c2fc65bc5ceb4..dd6e11ef6876d 100644 --- a/docs/apache-airflow-providers-weaviate/changelog.rst +++ b/docs/apache-airflow-providers-weaviate/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/weaviate/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/weaviate/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-weaviate/index.rst b/docs/apache-airflow-providers-weaviate/index.rst index c2a4965414fe3..fd7998daac395 100644 --- a/docs/apache-airflow-providers-weaviate/index.rst +++ b/docs/apache-airflow-providers-weaviate/index.rst @@ -60,7 +60,7 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/weaviate/index> + System Tests <_api/tests/system/weaviate/index> .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! 
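Every documentation hunk in this patch series applies one of three mechanical path rewrites: system-test example sources move from ``tests/system/providers/<provider>`` to ``providers/tests/system/<provider>``, provider changelog includes move under ``providers/src/airflow/providers/<provider>``, and the generated system-test API pages drop the ``providers`` path segment. The sketch below restates those rules as code for reference; it is distilled from the hunks themselves and is not the migration tooling the PR actually used, so the regexes and the ``rewrite_doc`` name are illustrative assumptions.

# Illustrative restatement of the three path-migration rules seen in the hunks.
# A sketch for orientation, not the actual migration script.
import re

REWRITES = [
    # exampleinclude sources: /../../tests/system/providers/<p>/...
    # becomes:                /../../providers/tests/system/<p>/...
    (re.compile(r"/\.\./\.\./tests/system/providers/"), "/../../providers/tests/system/"),
    # changelog includes: ../../airflow/providers/<p>/CHANGELOG.rst
    # becomes:            ../../providers/src/airflow/providers/<p>/CHANGELOG.rst
    (re.compile(r"\.\./\.\./airflow/providers/"), "../../providers/src/airflow/providers/"),
    # generated system-test API pages: _api/tests/system/providers/<p>/index
    # becomes:                         _api/tests/system/<p>/index
    (re.compile(r"_api/tests/system/providers/"), "_api/tests/system/"),
]


def rewrite_doc(text: str) -> str:
    """Apply the three migration rules to the text of one RST document."""
    for pattern, replacement in REWRITES:
        text = pattern.sub(replacement, text)
    return text

Feeding the old ``exampleinclude`` path from any hunk above through ``rewrite_doc`` reproduces the corresponding ``+`` line, and the weaviate hunks that follow fit the same pattern.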
diff --git a/docs/apache-airflow-providers-weaviate/operators/weaviate.rst b/docs/apache-airflow-providers-weaviate/operators/weaviate.rst index 5ec262ab7a2d3..ae9fcdba58eed 100644 --- a/docs/apache-airflow-providers-weaviate/operators/weaviate.rst +++ b/docs/apache-airflow-providers-weaviate/operators/weaviate.rst @@ -33,28 +33,28 @@ connect to your account. An example using the operator to ingest data with custom vectors retrieved from XCOM: -.. exampleinclude:: /../../tests/system/providers/weaviate/example_weaviate_operator.py +.. exampleinclude:: /../../providers/tests/system/weaviate/example_weaviate_operator.py :language: python :start-after: [START howto_operator_weaviate_embedding_and_ingest_xcom_data_with_vectors] :end-before: [END howto_operator_weaviate_embedding_and_ingest_xcom_data_with_vectors] An example using the operator to ingest data with custom vectors retrieved from a python callable: -.. exampleinclude:: /../../tests/system/providers/weaviate/example_weaviate_operator.py +.. exampleinclude:: /../../providers/tests/system/weaviate/example_weaviate_operator.py :language: python :start-after: [START howto_operator_weaviate_embedding_and_ingest_callable_data_with_vectors] :end-before: [END howto_operator_weaviate_embedding_and_ingest_callable_data_with_vectors] An example using the operator to ingest data without vectors retrieved from XCOM for which the operator would generate embedding vectors: -.. exampleinclude:: /../../tests/system/providers/weaviate/example_weaviate_operator.py +.. exampleinclude:: /../../providers/tests/system/weaviate/example_weaviate_operator.py :language: python :start-after: [START howto_operator_weaviate_ingest_xcom_data_without_vectors] :end-before: [END howto_operator_weaviate_ingest_xcom_data_without_vectors] An example using the operator to ingest data without vectors retrieved from a python callable for which the operator would generate embedding vectors: -.. exampleinclude:: /../../tests/system/providers/weaviate/example_weaviate_operator.py +.. exampleinclude:: /../../providers/tests/system/weaviate/example_weaviate_operator.py :language: python :start-after: [START howto_operator_weaviate_ingest_callable_data_without_vectors] :end-before: [END howto_operator_weaviate_ingest_callable_data_without_vectors] diff --git a/docs/apache-airflow-providers-yandex/changelog.rst b/docs/apache-airflow-providers-yandex/changelog.rst index 066c3c5be0be2..9bcad616eb83d 100644 --- a/docs/apache-airflow-providers-yandex/changelog.rst +++ b/docs/apache-airflow-providers-yandex/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/yandex/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/yandex/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-yandex/index.rst b/docs/apache-airflow-providers-yandex/index.rst index 03495bcef4611..08f3953aefeb4 100644 --- a/docs/apache-airflow-providers-yandex/index.rst +++ b/docs/apache-airflow-providers-yandex/index.rst @@ -51,14 +51,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/yandex/index> + System Tests <_api/tests/system/yandex/index> .. 
toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-yandex/operators/dataproc.rst b/docs/apache-airflow-providers-yandex/operators/dataproc.rst index 2bb08d859de4f..03dfd3acae817 100644 --- a/docs/apache-airflow-providers-yandex/operators/dataproc.rst +++ b/docs/apache-airflow-providers-yandex/operators/dataproc.rst @@ -34,4 +34,4 @@ that can be integrated with Apache Hadoop and other storage systems. Using the operators ^^^^^^^^^^^^^^^^^^^ To learn how to use Data Proc operators, -see `example DAGs `_. +see `example DAGs `_. diff --git a/docs/apache-airflow-providers-yandex/operators/yq.rst b/docs/apache-airflow-providers-yandex/operators/yq.rst index 78bdb733ee1ff..23bd4ac336160 100644 --- a/docs/apache-airflow-providers-yandex/operators/yq.rst +++ b/docs/apache-airflow-providers-yandex/operators/yq.rst @@ -25,4 +25,4 @@ Yandex Query Operators Using the operators ^^^^^^^^^^^^^^^^^^^ To learn how to use Yandex Query operator, -see `example DAG `__. +see `example DAG `__. diff --git a/docs/apache-airflow-providers-ydb/changelog.rst b/docs/apache-airflow-providers-ydb/changelog.rst index 801c69978c6b6..e77ade1c93725 100644 --- a/docs/apache-airflow-providers-ydb/changelog.rst +++ b/docs/apache-airflow-providers-ydb/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/ydb/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/ydb/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-ydb/index.rst b/docs/apache-airflow-providers-ydb/index.rst index 4dff0e421f9bd..30b8e90d97531 100644 --- a/docs/apache-airflow-providers-ydb/index.rst +++ b/docs/apache-airflow-providers-ydb/index.rst @@ -49,14 +49,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/ydb/index> + System Tests <_api/tests/system/ydb/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow-providers-ydb/operators/ydb_operator_howto_guide.rst b/docs/apache-airflow-providers-ydb/operators/ydb_operator_howto_guide.rst index 894be8101d5b6..9416894e53324 100644 --- a/docs/apache-airflow-providers-ydb/operators/ydb_operator_howto_guide.rst +++ b/docs/apache-airflow-providers-ydb/operators/ydb_operator_howto_guide.rst @@ -50,7 +50,7 @@ Creating an YDB table The code snippets below are based on Airflow-2.0 -.. exampleinclude:: /../../tests/system/providers/ydb/example_ydb.py +.. exampleinclude:: /../../providers/tests/system/ydb/example_ydb.py :language: python :start-after: [START ydb_operator_howto_guide] :end-before: [END ydb_operator_howto_guide_create_pet_table] @@ -187,7 +187,7 @@ The complete YDB Operator DAG When we put everything together, our DAG should look like this: -.. exampleinclude:: /../../tests/system/providers/ydb/example_ydb.py +.. 
exampleinclude:: /../../providers/tests/system/ydb/example_ydb.py :language: python :start-after: [START ydb_operator_howto_guide] :end-before: [END ydb_operator_howto_guide] diff --git a/docs/apache-airflow-providers-zendesk/changelog.rst b/docs/apache-airflow-providers-zendesk/changelog.rst index 3be2afdb2306b..eacd3ee51d1bd 100644 --- a/docs/apache-airflow-providers-zendesk/changelog.rst +++ b/docs/apache-airflow-providers-zendesk/changelog.rst @@ -22,4 +22,4 @@ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY -.. include:: ../../airflow/providers/zendesk/CHANGELOG.rst +.. include:: ../../providers/src/airflow/providers/zendesk/CHANGELOG.rst diff --git a/docs/apache-airflow-providers-zendesk/index.rst b/docs/apache-airflow-providers-zendesk/index.rst index 0b2852be24280..68d9576441888 100644 --- a/docs/apache-airflow-providers-zendesk/index.rst +++ b/docs/apache-airflow-providers-zendesk/index.rst @@ -48,14 +48,14 @@ :maxdepth: 1 :caption: System tests - System Tests <_api/tests/system/providers/zendesk/index> + System Tests <_api/tests/system/zendesk/index> .. toctree:: :hidden: :maxdepth: 1 :caption: Resources - Example DAGs + Example DAGs PyPI Repository Installing from sources diff --git a/docs/apache-airflow/tutorial/taskflow.rst b/docs/apache-airflow/tutorial/taskflow.rst index aac04f9b53454..e15e1c78045c2 100644 --- a/docs/apache-airflow/tutorial/taskflow.rst +++ b/docs/apache-airflow/tutorial/taskflow.rst @@ -307,7 +307,7 @@ Below is an example of using the ``@task.docker`` decorator to run a Python task .. _taskflow/docker_example: -.. exampleinclude:: /../../tests/system/providers/docker/example_taskflow_api_docker_virtualenv.py +.. exampleinclude:: /../../providers/tests/system/docker/example_taskflow_api_docker_virtualenv.py :language: python :dedent: 4 :start-after: [START transform_docker] @@ -338,7 +338,7 @@ Below is an example of using the ``@task.kubernetes`` decorator to run a Python .. _taskflow/kubernetes_example: -.. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes_decorator.py +.. exampleinclude:: /../../providers/tests/system/cncf/kubernetes/example_kubernetes_decorator.py :language: python :dedent: 4 :start-after: [START howto_operator_kubernetes] diff --git a/docs/build_docs.py b/docs/build_docs.py index f856d6828e8ed..82edc9d1632d3 100755 --- a/docs/build_docs.py +++ b/docs/build_docs.py @@ -35,7 +35,7 @@ from tabulate import tabulate from docs.exts.docs_build import dev_index_generator, lint_checks -from docs.exts.docs_build.code_utils import CONSOLE_WIDTH, PROVIDER_INIT_FILE +from docs.exts.docs_build.code_utils import CONSOLE_WIDTH from docs.exts.docs_build.docs_builder import DOCS_DIR, AirflowDocsBuilder, get_available_packages from docs.exts.docs_build.errors import DocBuildError, display_errors_summary from docs.exts.docs_build.fetch_inventories import fetch_inventories @@ -566,9 +566,6 @@ def main(): if not package_filters: _promote_new_flags() - if os.path.exists(PROVIDER_INIT_FILE): - os.remove(PROVIDER_INIT_FILE) - print_build_errors_and_exit( all_build_errors, all_spelling_errors, diff --git a/docs/conf.py b/docs/conf.py index 4d01e402195a5..a09e54db63d20 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -32,6 +32,7 @@ # All configuration values have a default; values that are commented out # serve to show the default. 
import json +import logging import os import pathlib import re @@ -74,13 +75,17 @@ ) except StopIteration: raise RuntimeError(f"Could not find provider.yaml file for package: {PACKAGE_NAME}") - PACKAGE_DIR = pathlib.Path(CURRENT_PROVIDER["package-dir"]) + + # Oddity: since we set autoapi_python_use_implicit_namespaces for provider packages, it does a "../" on the + # dir we give it. So we want to set the package dir to be airflow so it goes up to src, else we end up + # with "src" in the output paths of modules which we don't want + PACKAGE_DIR = ROOT_DIR / "providers" / "src" / "airflow" PACKAGE_VERSION = CURRENT_PROVIDER["versions"][0] - SYSTEM_TESTS_DIR = CURRENT_PROVIDER["system-tests-dir"] + SYSTEM_TESTS_DIR = ROOT_DIR / "providers" / "tests" / "system" elif PACKAGE_NAME == "apache-airflow-providers": from provider_yaml_utils import load_package_data - PACKAGE_DIR = ROOT_DIR / "airflow" / "providers" + PACKAGE_DIR = ROOT_DIR / "providers" / "src" PACKAGE_VERSION = "devel" ALL_PROVIDER_YAMLS = load_package_data() SYSTEM_TESTS_DIR = None @@ -108,8 +113,13 @@ global_substitutions = { "version": PACKAGE_VERSION, "airflow-version": airflow.__version__, + "experimental": "This is an :ref:`experimental feature <experimental>`.", } +if PACKAGE_NAME != "apache-airflow": + global_substitutions["experimental"] = "This is an :external:ref:`experimental feature <experimental>`." + + + # == Sphinx configuration ====================================================== # -- Project information ------------------------------------------------------- @@ -125,13 +135,7 @@ # -- General configuration ----------------------------------------------------- # See: https://www.sphinx-doc.org/en/master/usage/configuration.html -rst_epilog = "\n".join( - f".. |{key}| replace:: {replace}" - for key, replace in { - **global_substitutions, - "experimental": "This is an :ref:`experimental feature <experimental>`.", - }.items() -) +rst_epilog = "\n".join(f".. |{key}| replace:: {replace}" for key, replace in global_substitutions.items()) smartquotes_excludes = {"builders": ["man", "text", "spelling"]} @@ -139,7 +143,6 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - "provider_init_hack", "sphinx.ext.autodoc", "sphinx.ext.viewcode", "sphinxarg.ext", @@ -740,8 +743,6 @@ def _get_params(root_schema: dict, prefix: str = "", default_section: str = "") if PACKAGE_NAME != "docker-stack": autoapi_dirs.append(PACKAGE_DIR) -if SYSTEM_TESTS_DIR and os.path.exists(SYSTEM_TESTS_DIR): - autoapi_dirs.append(SYSTEM_TESTS_DIR) # A directory that has user-defined templates to override our default templates. if PACKAGE_NAME == "apache-airflow": @@ -755,16 +756,74 @@ def _get_params(root_schema: dict, prefix: str = "", default_section: str = "") "*/node_modules/*", "*/migrations/*", "*/contrib/*", - "**/example_taskflow_api_docker_virtualenv.py", - "**/example_dag_decorator.py", + "*/example_taskflow_api_docker_virtualenv.py", + "*/example_dag_decorator.py", + "*/conftest.py", + "*/tests/__init__.py", + "*/tests/system/__init__.py", + "*/test_aws_auth_manager.py", + # These sub-folders aren't really providers, but we need __init__.py files else various tools (ruff, mypy) + # get confused by providers/tests/system/cncf/kubernetes and think that folder is the top level + # kubernetes module!
+ "*/providers/tests/__init__.py", + "*/providers/tests/cncf/__init__.py", + "*/providers/tests/common/__init__.py", + "*/providers/tests/apache/__init__.py", + "*/providers/tests/dbt/__init__.py", + "*/providers/tests/microsoft/__init__.py", + "*/providers/tests/system/__init__.py", + "*/providers/tests/system/apache/__init__.py", + "*/providers/tests/system/cncf/__init__.py", + "*/providers/tests/system/common/__init__.py", + "*/providers/tests/system/dbt/__init__.py", + "*/providers/tests/system/microsoft/__init__.py", ] -if PACKAGE_NAME == "apache-airflow": - autoapi_ignore.append("*/airflow/providers/*") -elif PACKAGE_NAME == "docker-stack": - autoapi_ignore.append("*/airflow/providers/*") + +ignore_re = re.compile(r"\[AutoAPI\] .* Ignoring \s (?P/[\w/.]*)", re.VERBOSE) + + +# Make the "Ignoring /..." log messages slightly less verbose +def filter_ignore(record: logging.LogRecord) -> bool: + matches = ignore_re.search(record.msg) + if not matches: + return True + if matches["path"].endswith("__init__.py"): + record.msg = record.msg.replace("__init__.py", "") + return True + return False + + +autoapi_log = logging.getLogger("sphinx.autoapi.mappers.base") +autoapi_log.addFilter(filter_ignore) + +if PACKAGE_NAME.startswith("apache-airflow-providers-"): + autoapi_python_use_implicit_namespaces = True + from provider_yaml_utils import load_package_data + + autoapi_ignore.extend( + ( + "*/airflow/__init__.py", + "*/airflow/providiers/__init__.py", + "*/example_dags/*", + "*/airflow/providers/cncf/kubernetes/backcompat/*", + ) + ) + + for p in load_package_data(include_suspended=True): + if p["package-name"] == PACKAGE_NAME: + continue + autoapi_ignore.extend((p["package-dir"] + "/*", p["system-tests-dir"] + "/*")) + + autoapi_keep_files = True + + if SYSTEM_TESTS_DIR and os.path.exists(SYSTEM_TESTS_DIR): + test_dir = SYSTEM_TESTS_DIR.parent + autoapi_dirs.append(test_dir) + + autoapi_ignore.extend(f"{d}/*" for d in test_dir.glob("*") if d.is_dir() and d.name != "system") else: - autoapi_ignore.append("*/airflow/providers/cncf/kubernetes/backcompat/*") - autoapi_ignore.append("*/example_dags/*") + if SYSTEM_TESTS_DIR and os.path.exists(SYSTEM_TESTS_DIR): + autoapi_dirs.append(SYSTEM_TESTS_DIR) # Keep the AutoAPI generated files on the filesystem after the run. # Useful for debugging. autoapi_keep_files = True diff --git a/docs/exts/docs_build/code_utils.py b/docs/exts/docs_build/code_utils.py index 6aef1ab1f3051..3bbade4beb59e 100644 --- a/docs/exts/docs_build/code_utils.py +++ b/docs/exts/docs_build/code_utils.py @@ -24,7 +24,6 @@ ROOT_PROJECT_DIR = os.path.abspath( os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, os.pardir) ) -PROVIDER_INIT_FILE = os.path.join(ROOT_PROJECT_DIR, "airflow", "providers", "__init__.py") DOCS_DIR = os.path.join(ROOT_PROJECT_DIR, "docs") AIRFLOW_DIR = os.path.join(ROOT_PROJECT_DIR, "airflow") diff --git a/docs/exts/exampleinclude.py b/docs/exts/exampleinclude.py index eade660515db1..ee5d9329d2a25 100644 --- a/docs/exts/exampleinclude.py +++ b/docs/exts/exampleinclude.py @@ -188,7 +188,18 @@ def create_node(env, relative_path, show_button): :param show_button: whether to show "view code" button :return paragraph with the node """ - pagename = "_modules/" + relative_path[:-3] + + # Strip "providers" out of the example title that we include/link to. 
The full path needs to include + # it so we can pull in the code, but we don't want it to show up in the rendered docs + if relative_path.startswith("providers/src/"): + relative_path = relative_path.replace("providers/src/", "", 1) + elif relative_path.startswith("providers/"): + relative_path = relative_path.replace("providers/", "", 1) + + if relative_path.endswith(".py"): + pagename = "_modules/" + relative_path[:-3] + else: + pagename = "_modules/" + relative_path header_classes = ["example-header"] if show_button: diff --git a/docs/exts/provider_init_hack.py b/docs/exts/provider_init_hack.py deleted file mode 100644 index 819a78b95d7bf..0000000000000 --- a/docs/exts/provider_init_hack.py +++ /dev/null @@ -1,56 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -""" -Bugs in sphinx-autoapi using metaclasses prevent us from upgrading to 1.3 -which has implicit namespace support. Until that time, we make it look -like a real package for building docs -""" - -from __future__ import annotations - -import os -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from sphinx.application import Sphinx - -ROOT_PROJECT_DIR = os.path.abspath( - os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir) -) - -PROVIDER_INIT_FILE = os.path.join(ROOT_PROJECT_DIR, "airflow", "providers", "__init__.py") - - -def _create_init_py(app, config): - del app - del config - # This file is deleted by /docs/build_docs.py. If you are not using the script, the file will be - # deleted by pre-commit. - with open(PROVIDER_INIT_FILE, "w"): - pass - - -def setup(app: Sphinx): - """ - Sets the plugin up and returns configuration of the plugin. - - :param app: application. - :return json description of the configuration that is needed by the plugin. 
- """ - app.connect("config-inited", _create_init_py) - - return {"version": "builtin", "parallel_read_safe": True, "parallel_write_safe": True} diff --git a/docs/exts/provider_yaml_utils.py b/docs/exts/provider_yaml_utils.py index 1eaf93a6986e7..6e5d3a835e1d0 100644 --- a/docs/exts/provider_yaml_utils.py +++ b/docs/exts/provider_yaml_utils.py @@ -18,7 +18,7 @@ import json import os -from glob import glob +from functools import lru_cache from pathlib import Path from typing import Any @@ -26,6 +26,8 @@ import yaml ROOT_DIR = Path(__file__).parents[2].resolve() +AIRFLOW_PROVIDERS_SRC = ROOT_DIR / "providers" / "src" +AIRFLOW_PROVIDERS_NS_PACKAGE = AIRFLOW_PROVIDERS_SRC / "airflow" / "providers" PROVIDER_DATA_SCHEMA_PATH = ROOT_DIR / "airflow" / "provider.yaml.schema.json" @@ -36,24 +38,22 @@ def _load_schema() -> dict[str, Any]: def _filepath_to_module(filepath: str): - return str(Path(filepath).relative_to(ROOT_DIR)).replace("/", ".") + return str(Path(filepath).relative_to(AIRFLOW_PROVIDERS_SRC)).replace("/", ".") def _filepath_to_system_tests(filepath: str): return str( - ROOT_DIR - / "tests" - / "system" - / "providers" - / Path(filepath).relative_to(ROOT_DIR / "airflow" / "providers") + ROOT_DIR / "providers" / "tests" / "system" / Path(filepath).relative_to(AIRFLOW_PROVIDERS_NS_PACKAGE) ) +@lru_cache def get_provider_yaml_paths(): """Returns list of provider.yaml files""" - return sorted(glob(f"{ROOT_DIR}/airflow/providers/**/provider.yaml", recursive=True)) + return sorted(AIRFLOW_PROVIDERS_NS_PACKAGE.rglob("**/provider.yaml")) +@lru_cache def load_package_data(include_suspended: bool = False) -> list[dict[str, Any]]: """ Load all data from providers files diff --git a/docs/exts/providers_extensions.py b/docs/exts/providers_extensions.py index 5de6dcb8eb7b2..1b59cc00aca3f 100644 --- a/docs/exts/providers_extensions.py +++ b/docs/exts/providers_extensions.py @@ -23,10 +23,8 @@ from pathlib import Path from typing import Any, Iterable -import yaml - # No stub exists for docutils.parsers.rst.directives. See https://github.com/python/typeshed/issues/5755. -from provider_yaml_utils import get_provider_yaml_paths +from provider_yaml_utils import load_package_data from docs.exts.operators_and_hooks_ref import ( DEFAULT_HEADER_SEPARATOR, @@ -64,7 +62,7 @@ def get_import_mappings(tree): def _get_module_class_registry( - module_filepath: str, class_extras: dict[str, Any] + module_filepath: Path, module_name: str, class_extras: dict[str, Any] ) -> dict[str, dict[str, Any]]: """Extracts classes and its information from a Python module file. @@ -80,7 +78,6 @@ def _get_module_class_registry( with open(module_filepath) as file: ast_obj = ast.parse(file.read()) - module_name = module_filepath.replace("/", ".").replace(".py", "").lstrip(".") import_mappings = get_import_mappings(ast_obj) module_class_registry = { f"{module_name}.{node.name}": { @@ -140,16 +137,26 @@ def _get_providers_class_registry() -> dict[str, dict[str, Any]]: :return: A dictionary with provider names as keys and a dictionary of classes as values. 
""" class_registry = {} - for provider_yaml_path in get_provider_yaml_paths(): - provider_yaml_content = yaml.safe_load(Path(provider_yaml_path).read_text()) - for root, _, file_names in os.walk(Path(provider_yaml_path).parent): + for provider_yaml_content in load_package_data(): + provider_pkg_root = Path(provider_yaml_content["package-dir"]) + for root, _, file_names in os.walk(provider_pkg_root): + folder = Path(root) for file_name in file_names: - module_filepath = f"{os.path.relpath(root)}/{file_name}" - if not module_filepath.endswith(".py") or module_filepath == "__init__.py": + if not file_name.endswith(".py") or file_name == "__init__.py": continue + module_filepath = folder.joinpath(file_name) + module_registry = _get_module_class_registry( module_filepath=module_filepath, + module_name=( + provider_yaml_content["python-module"] + + "." + + module_filepath.relative_to(provider_pkg_root) + .with_suffix("") + .as_posix() + .replace("/", ".") + ), class_extras={"provider_name": provider_yaml_content["package-name"]}, ) class_registry.update(module_registry) diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json index 2d0ab90a3508b..e6a0deca23cbf 100644 --- a/generated/provider_dependencies.json +++ b/generated/provider_dependencies.json @@ -345,8 +345,7 @@ "devel-deps": [], "plugins": [], "cross-providers-deps": [ - "cncf.kubernetes", - "standard" + "cncf.kubernetes" ], "excluded-python-versions": [], "state": "ready" @@ -534,9 +533,7 @@ "plugin-class": "airflow.providers.edge.plugins.edge_executor_plugin.EdgeExecutorPlugin" } ], - "cross-providers-deps": [ - "standard" - ], + "cross-providers-deps": [], "excluded-python-versions": [], "state": "not-ready" }, diff --git a/providers/.gitignore b/providers/.gitignore new file mode 100644 index 0000000000000..2924614f3a149 --- /dev/null +++ b/providers/.gitignore @@ -0,0 +1,7 @@ +# Ignore init files in these non-provider folders. I.e. we relrease atlassian/jira, but not atlassian +src/airflow/providers/apache/__init__.py +src/airflow/providers/atlassian/__init__.py +src/airflow/providers/cncf/__init__.py +src/airflow/providers/common/__init__.py +src/airflow/providers/dbt/__init__.py +src/airflow/providers/microsoft/__init__.py diff --git a/tests/test_utils/__init__.py b/providers/__init__.py similarity index 74% rename from tests/test_utils/__init__.py rename to providers/__init__.py index 4c4790fa4b479..c53de5451bd08 100644 --- a/tests/test_utils/__init__.py +++ b/providers/__init__.py @@ -1,4 +1,3 @@ -# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information @@ -15,10 +14,10 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-from __future__ import annotations - -import os -AIRFLOW_MAIN_FOLDER = os.path.realpath( - os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir) -) +# This exists so that pytest doesn't get confused about namespace packages +# and think that `tests/conftest.py` and `providers/tests/conftest.py` are +# both "tests.conftest" +# +# This is a temporary solution until https://github.com/apache/airflow/issues/42632 +# is done diff --git a/providers/pyproject.toml b/providers/pyproject.toml new file mode 100644 index 0000000000000..093cbbd547141 --- /dev/null +++ b/providers/pyproject.toml @@ -0,0 +1,98 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +[build-system] +requires = ["hatchling", "hatch-vcs"] +build-backend = "hatchling.build" + +[project] +name = "local-providers" +version = "0.1.0" +description = "Placeholder package for local/from-sources providers." +requires-python = ">=3.8, <3.13" +classifiers = [ + "Private :: Do Not Upload", +] + +[tool.hatch.publish.index] +# Let's make doubly sure this never goes to PyPI +disable = true + +[tool.hatch.build.targets.wheel] +packages = ["src/airflow"] +exclude = [ + ".gitignore", + ".latest-doc-only-change.txt", + "CHANGELOG.rst", + "MANAGING_PROVIDERS_LIFECYCLE.rst", +] + +[tool.ruff] +extend = "../pyproject.toml" +src = ["src"] +namespace-packages = ["src/airflow/providers"] +extend-exclude = [ + # The files generated by stubgen aren't 100% valid syntax it turns out, and we don't ship them, so we can + # ignore them in ruff + "src/airflow/providers/common/sql/*/*.pyi", +] + +[tool.ruff.lint.per-file-ignores] + +# Ignore Doc rules et al for anything outside of src (i.e. tests) +"!src/*" = ["D", "TID253", "S101", "TRY002"] + +# https://github.com/apache/airflow/issues/39252 +"src/airflow/providers/amazon/aws/hooks/eks.py" = ["W605"] + +# All of the modules which have an extra license header (i.e.
that we copy from another project) need to +# ignore E402 -- module level import not at top level +"tests/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py" = ["E402"] +"tests/common/io/xcom/test_backend.py" = ["E402"] +"tests/elasticsearch/log/elasticmock/__init__.py" = ["E402"] +"tests/elasticsearch/log/elasticmock/utilities/__init__.py" = ["E402"] +"tests/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py" = ["E402"] +"tests/google/cloud/hooks/vertex_ai/test_hyperparameter_tuning_job.py" = ["E402"] +"tests/google/cloud/hooks/vertex_ai/test_auto_ml.py" = ["E402"] +"tests/google/cloud/hooks/vertex_ai/test_custom_job.py" = ["E402"] +"tests/google/cloud/hooks/vertex_ai/test_dataset.py" = ["E402"] +"tests/google/cloud/hooks/vertex_ai/test_endpoint_service.py" = ["E402"] +"tests/google/cloud/hooks/vertex_ai/test_generative_model.py" = ["E402"] +"tests/google/cloud/hooks/vertex_ai/test_model_service.py" = ["E402"] +"tests/google/cloud/hooks/vertex_ai/test_pipeline_job.py" = ["E402"] +"tests/google/cloud/hooks/vertex_ai/test_prediction_service.py" = ["E402"] +"tests/google/cloud/links/test_translate.py" = ["E402"] +"tests/google/cloud/operators/test_automl.py"= ["E402"] +"tests/google/cloud/operators/test_vertex_ai.py" = ["E402"] +"tests/google/cloud/operators/vertex_ai/test_generative_model.py" = ["E402"] +"tests/google/cloud/triggers/test_vertex_ai.py" = ["E402"] +"tests/openai/hooks/test_openai.py" = ["E402"] +"tests/openai/operators/test_openai.py" = ["E402"] +"tests/openai/triggers/test_openai.py" = ["E402"] +"tests/opensearch/conftest.py" = ["E402"] +"tests/opensearch/hooks/test_opensearch.py" = ["E402"] +"tests/opensearch/log/test_os_json_formatter.py" = ["E402"] +"tests/opensearch/log/test_os_response.py" = ["E402"] +"tests/opensearch/log/test_os_task_handler.py" = ["E402"] +"tests/opensearch/operators/test_opensearch.py" = ["E402"] +"tests/qdrant/hooks/test_qdrant.py" = ["E402"] +"tests/qdrant/operators/test_qdrant.py" = ["E402"] +"tests/snowflake/operators/test_snowflake_sql.py" = ["E402"] +"tests/yandex/**/*.py" = ["E402"] + +# https://github.com/apache/airflow/issues/39252 +"airflow/providers/amazon/aws/hooks/eks.py" = ["W605"] diff --git a/providers/src/airflow/providers/.gitignore b/providers/src/airflow/providers/.gitignore new file mode 100644 index 0000000000000..528066d9003e2 --- /dev/null +++ b/providers/src/airflow/providers/.gitignore @@ -0,0 +1 @@ +/__init__.py diff --git a/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst b/providers/src/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst similarity index 98% rename from airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst rename to providers/src/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst index 3d3e95c28b172..c5e6ec1287854 100644 --- a/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst +++ b/providers/src/airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst @@ -108,7 +108,7 @@ breeze and I'll run unit tests for my Hook. .. code-block:: bash - root@fafd8d630e46:/opt/airflow# python -m pytest tests/providers//hook/test_*.py + root@fafd8d630e46:/opt/airflow# python -m pytest providers/tests//hook/test_*.py Adding chicken-egg providers ---------------------------- @@ -341,23 +341,23 @@ Example failing collection after ``google`` provider has been suspended: .. code-block:: txt - _____ ERROR collecting tests/providers/apache/beam/operators/test_beam.py ______ - ImportError while importing test module '/opt/airflow/tests/providers/apache/beam/operators/test_beam.py'. 
+ _____ ERROR collecting providers/tests/apache/beam/operators/test_beam.py ______ + ImportError while importing test module '/opt/airflow/providers/tests/apache/beam/operators/test_beam.py'. Hint: make sure your test modules/packages have valid Python names. Traceback: /usr/local/lib/python3.8/importlib/__init__.py:127: in import_module return _bootstrap._gcd_import(name[level:], package, level) - tests/providers/apache/beam/operators/test_beam.py:25: in + providers/tests/apache/beam/operators/test_beam.py:25: in from airflow.providers.apache.beam.operators.beam import ( airflow/providers/apache/beam/operators/beam.py:35: in from airflow.providers.google.cloud.hooks.dataflow import ( airflow/providers/google/cloud/hooks/dataflow.py:32: in from google.cloud.dataflow_v1beta3 import GetJobRequest, Job, JobState, JobsV1Beta3AsyncClient, JobView E ModuleNotFoundError: No module named 'google.cloud.dataflow_v1beta3' - _ ERROR collecting tests/providers/microsoft/azure/transfers/test_azure_blob_to_gcs.py _ + _ ERROR collecting providers/tests/microsoft/azure/transfers/test_azure_blob_to_gcs.py _ -The fix is to add this line at the top of the ``tests/providers/apache/beam/operators/test_beam.py`` module: +The fix is to add this line at the top of the ``providers/tests/apache/beam/operators/test_beam.py`` module: .. code-block:: python diff --git a/airflow/providers/airbyte/.latest-doc-only-change.txt b/providers/src/airflow/providers/airbyte/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/airbyte/.latest-doc-only-change.txt rename to providers/src/airflow/providers/airbyte/.latest-doc-only-change.txt diff --git a/airflow/providers/airbyte/CHANGELOG.rst b/providers/src/airflow/providers/airbyte/CHANGELOG.rst similarity index 100% rename from airflow/providers/airbyte/CHANGELOG.rst rename to providers/src/airflow/providers/airbyte/CHANGELOG.rst diff --git a/airflow/providers/airbyte/__init__.py b/providers/src/airflow/providers/airbyte/__init__.py similarity index 100% rename from airflow/providers/airbyte/__init__.py rename to providers/src/airflow/providers/airbyte/__init__.py diff --git a/airflow/providers/airbyte/hooks/__init__.py b/providers/src/airflow/providers/airbyte/hooks/__init__.py similarity index 100% rename from airflow/providers/airbyte/hooks/__init__.py rename to providers/src/airflow/providers/airbyte/hooks/__init__.py diff --git a/airflow/providers/airbyte/hooks/airbyte.py b/providers/src/airflow/providers/airbyte/hooks/airbyte.py similarity index 100% rename from airflow/providers/airbyte/hooks/airbyte.py rename to providers/src/airflow/providers/airbyte/hooks/airbyte.py diff --git a/airflow/providers/airbyte/operators/__init__.py b/providers/src/airflow/providers/airbyte/operators/__init__.py similarity index 100% rename from airflow/providers/airbyte/operators/__init__.py rename to providers/src/airflow/providers/airbyte/operators/__init__.py diff --git a/airflow/providers/airbyte/operators/airbyte.py b/providers/src/airflow/providers/airbyte/operators/airbyte.py similarity index 100% rename from airflow/providers/airbyte/operators/airbyte.py rename to providers/src/airflow/providers/airbyte/operators/airbyte.py diff --git a/airflow/providers/airbyte/provider.yaml b/providers/src/airflow/providers/airbyte/provider.yaml similarity index 100% rename from airflow/providers/airbyte/provider.yaml rename to providers/src/airflow/providers/airbyte/provider.yaml diff --git a/airflow/providers/alibaba/cloud/operators/__init__.py 
b/providers/src/airflow/providers/airbyte/sensors/__init__.py similarity index 100% rename from airflow/providers/alibaba/cloud/operators/__init__.py rename to providers/src/airflow/providers/airbyte/sensors/__init__.py diff --git a/airflow/providers/airbyte/sensors/airbyte.py b/providers/src/airflow/providers/airbyte/sensors/airbyte.py similarity index 100% rename from airflow/providers/airbyte/sensors/airbyte.py rename to providers/src/airflow/providers/airbyte/sensors/airbyte.py diff --git a/airflow/providers/alibaba/cloud/sensors/__init__.py b/providers/src/airflow/providers/airbyte/triggers/__init__.py similarity index 100% rename from airflow/providers/alibaba/cloud/sensors/__init__.py rename to providers/src/airflow/providers/airbyte/triggers/__init__.py diff --git a/airflow/providers/airbyte/triggers/airbyte.py b/providers/src/airflow/providers/airbyte/triggers/airbyte.py similarity index 100% rename from airflow/providers/airbyte/triggers/airbyte.py rename to providers/src/airflow/providers/airbyte/triggers/airbyte.py diff --git a/airflow/providers/alibaba/.latest-doc-only-change.txt b/providers/src/airflow/providers/alibaba/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/alibaba/.latest-doc-only-change.txt rename to providers/src/airflow/providers/alibaba/.latest-doc-only-change.txt diff --git a/airflow/providers/alibaba/CHANGELOG.rst b/providers/src/airflow/providers/alibaba/CHANGELOG.rst similarity index 100% rename from airflow/providers/alibaba/CHANGELOG.rst rename to providers/src/airflow/providers/alibaba/CHANGELOG.rst diff --git a/airflow/providers/alibaba/__init__.py b/providers/src/airflow/providers/alibaba/__init__.py similarity index 100% rename from airflow/providers/alibaba/__init__.py rename to providers/src/airflow/providers/alibaba/__init__.py diff --git a/airflow/providers/alibaba/cloud/__init__.py b/providers/src/airflow/providers/alibaba/cloud/__init__.py similarity index 100% rename from airflow/providers/alibaba/cloud/__init__.py rename to providers/src/airflow/providers/alibaba/cloud/__init__.py diff --git a/airflow/providers/alibaba/cloud/hooks/__init__.py b/providers/src/airflow/providers/alibaba/cloud/hooks/__init__.py similarity index 100% rename from airflow/providers/alibaba/cloud/hooks/__init__.py rename to providers/src/airflow/providers/alibaba/cloud/hooks/__init__.py diff --git a/airflow/providers/alibaba/cloud/hooks/analyticdb_spark.py b/providers/src/airflow/providers/alibaba/cloud/hooks/analyticdb_spark.py similarity index 100% rename from airflow/providers/alibaba/cloud/hooks/analyticdb_spark.py rename to providers/src/airflow/providers/alibaba/cloud/hooks/analyticdb_spark.py diff --git a/airflow/providers/alibaba/cloud/hooks/oss.py b/providers/src/airflow/providers/alibaba/cloud/hooks/oss.py similarity index 100% rename from airflow/providers/alibaba/cloud/hooks/oss.py rename to providers/src/airflow/providers/alibaba/cloud/hooks/oss.py diff --git a/airflow/providers/amazon/aws/__init__.py b/providers/src/airflow/providers/alibaba/cloud/log/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/__init__.py rename to providers/src/airflow/providers/alibaba/cloud/log/__init__.py diff --git a/airflow/providers/alibaba/cloud/log/oss_task_handler.py b/providers/src/airflow/providers/alibaba/cloud/log/oss_task_handler.py similarity index 100% rename from airflow/providers/alibaba/cloud/log/oss_task_handler.py rename to providers/src/airflow/providers/alibaba/cloud/log/oss_task_handler.py diff --git 
a/airflow/providers/amazon/aws/assets/__init__.py b/providers/src/airflow/providers/alibaba/cloud/operators/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/assets/__init__.py rename to providers/src/airflow/providers/alibaba/cloud/operators/__init__.py diff --git a/airflow/providers/alibaba/cloud/operators/analyticdb_spark.py b/providers/src/airflow/providers/alibaba/cloud/operators/analyticdb_spark.py similarity index 100% rename from airflow/providers/alibaba/cloud/operators/analyticdb_spark.py rename to providers/src/airflow/providers/alibaba/cloud/operators/analyticdb_spark.py diff --git a/airflow/providers/alibaba/cloud/operators/oss.py b/providers/src/airflow/providers/alibaba/cloud/operators/oss.py similarity index 100% rename from airflow/providers/alibaba/cloud/operators/oss.py rename to providers/src/airflow/providers/alibaba/cloud/operators/oss.py diff --git a/airflow/providers/amazon/aws/auth_manager/__init__.py b/providers/src/airflow/providers/alibaba/cloud/sensors/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/__init__.py rename to providers/src/airflow/providers/alibaba/cloud/sensors/__init__.py diff --git a/airflow/providers/alibaba/cloud/sensors/analyticdb_spark.py b/providers/src/airflow/providers/alibaba/cloud/sensors/analyticdb_spark.py similarity index 100% rename from airflow/providers/alibaba/cloud/sensors/analyticdb_spark.py rename to providers/src/airflow/providers/alibaba/cloud/sensors/analyticdb_spark.py diff --git a/airflow/providers/alibaba/cloud/sensors/oss_key.py b/providers/src/airflow/providers/alibaba/cloud/sensors/oss_key.py similarity index 100% rename from airflow/providers/alibaba/cloud/sensors/oss_key.py rename to providers/src/airflow/providers/alibaba/cloud/sensors/oss_key.py diff --git a/airflow/providers/alibaba/provider.yaml b/providers/src/airflow/providers/alibaba/provider.yaml similarity index 100% rename from airflow/providers/alibaba/provider.yaml rename to providers/src/airflow/providers/alibaba/provider.yaml diff --git a/airflow/providers/amazon/.latest-doc-only-change.txt b/providers/src/airflow/providers/amazon/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/amazon/.latest-doc-only-change.txt rename to providers/src/airflow/providers/amazon/.latest-doc-only-change.txt diff --git a/airflow/providers/amazon/CHANGELOG.rst b/providers/src/airflow/providers/amazon/CHANGELOG.rst similarity index 100% rename from airflow/providers/amazon/CHANGELOG.rst rename to providers/src/airflow/providers/amazon/CHANGELOG.rst diff --git a/airflow/providers/amazon/__init__.py b/providers/src/airflow/providers/amazon/__init__.py similarity index 100% rename from airflow/providers/amazon/__init__.py rename to providers/src/airflow/providers/amazon/__init__.py diff --git a/airflow/providers/amazon/aws/auth_manager/avp/__init__.py b/providers/src/airflow/providers/amazon/aws/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/avp/__init__.py rename to providers/src/airflow/providers/amazon/aws/__init__.py diff --git a/airflow/providers/amazon/aws/auth_manager/cli/__init__.py b/providers/src/airflow/providers/amazon/aws/assets/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/cli/__init__.py rename to providers/src/airflow/providers/amazon/aws/assets/__init__.py diff --git a/airflow/providers/amazon/aws/assets/s3.py b/providers/src/airflow/providers/amazon/aws/assets/s3.py similarity index 
100% rename from airflow/providers/amazon/aws/assets/s3.py rename to providers/src/airflow/providers/amazon/aws/assets/s3.py diff --git a/airflow/providers/amazon/aws/auth_manager/security_manager/__init__.py b/providers/src/airflow/providers/amazon/aws/auth_manager/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/security_manager/__init__.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/__init__.py diff --git a/airflow/providers/amazon/aws/auth_manager/views/__init__.py b/providers/src/airflow/providers/amazon/aws/auth_manager/avp/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/views/__init__.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/avp/__init__.py diff --git a/airflow/providers/amazon/aws/auth_manager/avp/entities.py b/providers/src/airflow/providers/amazon/aws/auth_manager/avp/entities.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/avp/entities.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/avp/entities.py diff --git a/airflow/providers/amazon/aws/auth_manager/avp/facade.py b/providers/src/airflow/providers/amazon/aws/auth_manager/avp/facade.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/avp/facade.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/avp/facade.py diff --git a/airflow/providers/amazon/aws/auth_manager/avp/schema.json b/providers/src/airflow/providers/amazon/aws/auth_manager/avp/schema.json similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/avp/schema.json rename to providers/src/airflow/providers/amazon/aws/auth_manager/avp/schema.json diff --git a/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py b/providers/src/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/aws_auth_manager.py diff --git a/airflow/providers/amazon/aws/executors/__init__.py b/providers/src/airflow/providers/amazon/aws/auth_manager/cli/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/executors/__init__.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/cli/__init__.py diff --git a/airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py b/providers/src/airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/cli/avp_commands.py diff --git a/airflow/providers/amazon/aws/auth_manager/cli/definition.py b/providers/src/airflow/providers/amazon/aws/auth_manager/cli/definition.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/cli/definition.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/cli/definition.py diff --git a/airflow/providers/amazon/aws/auth_manager/constants.py b/providers/src/airflow/providers/amazon/aws/auth_manager/constants.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/constants.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/constants.py diff --git a/airflow/providers/amazon/aws/executors/utils/__init__.py b/providers/src/airflow/providers/amazon/aws/auth_manager/security_manager/__init__.py similarity index 100% rename from 
airflow/providers/amazon/aws/executors/utils/__init__.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/security_manager/__init__.py diff --git a/airflow/providers/amazon/aws/auth_manager/security_manager/aws_security_manager_override.py b/providers/src/airflow/providers/amazon/aws/auth_manager/security_manager/aws_security_manager_override.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/security_manager/aws_security_manager_override.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/security_manager/aws_security_manager_override.py diff --git a/airflow/providers/amazon/aws/auth_manager/user.py b/providers/src/airflow/providers/amazon/aws/auth_manager/user.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/user.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/user.py diff --git a/airflow/providers/amazon/aws/fs/__init__.py b/providers/src/airflow/providers/amazon/aws/auth_manager/views/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/fs/__init__.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/views/__init__.py diff --git a/airflow/providers/amazon/aws/auth_manager/views/auth.py b/providers/src/airflow/providers/amazon/aws/auth_manager/views/auth.py similarity index 100% rename from airflow/providers/amazon/aws/auth_manager/views/auth.py rename to providers/src/airflow/providers/amazon/aws/auth_manager/views/auth.py diff --git a/airflow/providers/amazon/aws/exceptions.py b/providers/src/airflow/providers/amazon/aws/exceptions.py similarity index 100% rename from airflow/providers/amazon/aws/exceptions.py rename to providers/src/airflow/providers/amazon/aws/exceptions.py diff --git a/airflow/providers/amazon/aws/executors/Dockerfile b/providers/src/airflow/providers/amazon/aws/executors/Dockerfile similarity index 100% rename from airflow/providers/amazon/aws/executors/Dockerfile rename to providers/src/airflow/providers/amazon/aws/executors/Dockerfile diff --git a/airflow/providers/amazon/aws/hooks/__init__.py b/providers/src/airflow/providers/amazon/aws/executors/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/__init__.py rename to providers/src/airflow/providers/amazon/aws/executors/__init__.py diff --git a/airflow/providers/amazon/aws/executors/batch/__init__.py b/providers/src/airflow/providers/amazon/aws/executors/batch/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/executors/batch/__init__.py rename to providers/src/airflow/providers/amazon/aws/executors/batch/__init__.py diff --git a/airflow/providers/amazon/aws/executors/batch/batch_executor.py b/providers/src/airflow/providers/amazon/aws/executors/batch/batch_executor.py similarity index 100% rename from airflow/providers/amazon/aws/executors/batch/batch_executor.py rename to providers/src/airflow/providers/amazon/aws/executors/batch/batch_executor.py diff --git a/airflow/providers/amazon/aws/executors/batch/batch_executor_config.py b/providers/src/airflow/providers/amazon/aws/executors/batch/batch_executor_config.py similarity index 100% rename from airflow/providers/amazon/aws/executors/batch/batch_executor_config.py rename to providers/src/airflow/providers/amazon/aws/executors/batch/batch_executor_config.py diff --git a/airflow/providers/amazon/aws/executors/batch/boto_schema.py b/providers/src/airflow/providers/amazon/aws/executors/batch/boto_schema.py similarity index 100% rename from 
airflow/providers/amazon/aws/executors/batch/boto_schema.py rename to providers/src/airflow/providers/amazon/aws/executors/batch/boto_schema.py diff --git a/airflow/providers/amazon/aws/executors/batch/utils.py b/providers/src/airflow/providers/amazon/aws/executors/batch/utils.py similarity index 100% rename from airflow/providers/amazon/aws/executors/batch/utils.py rename to providers/src/airflow/providers/amazon/aws/executors/batch/utils.py diff --git a/airflow/providers/amazon/aws/executors/ecs/__init__.py b/providers/src/airflow/providers/amazon/aws/executors/ecs/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/executors/ecs/__init__.py rename to providers/src/airflow/providers/amazon/aws/executors/ecs/__init__.py diff --git a/airflow/providers/amazon/aws/executors/ecs/boto_schema.py b/providers/src/airflow/providers/amazon/aws/executors/ecs/boto_schema.py similarity index 100% rename from airflow/providers/amazon/aws/executors/ecs/boto_schema.py rename to providers/src/airflow/providers/amazon/aws/executors/ecs/boto_schema.py diff --git a/airflow/providers/amazon/aws/executors/ecs/ecs_executor.py b/providers/src/airflow/providers/amazon/aws/executors/ecs/ecs_executor.py similarity index 100% rename from airflow/providers/amazon/aws/executors/ecs/ecs_executor.py rename to providers/src/airflow/providers/amazon/aws/executors/ecs/ecs_executor.py diff --git a/airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py b/providers/src/airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py similarity index 100% rename from airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py rename to providers/src/airflow/providers/amazon/aws/executors/ecs/ecs_executor_config.py diff --git a/airflow/providers/amazon/aws/executors/ecs/utils.py b/providers/src/airflow/providers/amazon/aws/executors/ecs/utils.py similarity index 100% rename from airflow/providers/amazon/aws/executors/ecs/utils.py rename to providers/src/airflow/providers/amazon/aws/executors/ecs/utils.py diff --git a/airflow/providers/amazon/aws/links/__init__.py b/providers/src/airflow/providers/amazon/aws/executors/utils/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/links/__init__.py rename to providers/src/airflow/providers/amazon/aws/executors/utils/__init__.py diff --git a/airflow/providers/amazon/aws/executors/utils/base_config_keys.py b/providers/src/airflow/providers/amazon/aws/executors/utils/base_config_keys.py similarity index 100% rename from airflow/providers/amazon/aws/executors/utils/base_config_keys.py rename to providers/src/airflow/providers/amazon/aws/executors/utils/base_config_keys.py diff --git a/airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py b/providers/src/airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py similarity index 100% rename from airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py rename to providers/src/airflow/providers/amazon/aws/executors/utils/exponential_backoff_retry.py diff --git a/airflow/providers/amazon/aws/log/__init__.py b/providers/src/airflow/providers/amazon/aws/fs/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/log/__init__.py rename to providers/src/airflow/providers/amazon/aws/fs/__init__.py diff --git a/airflow/providers/amazon/aws/fs/s3.py b/providers/src/airflow/providers/amazon/aws/fs/s3.py similarity index 100% rename from airflow/providers/amazon/aws/fs/s3.py rename to 
providers/src/airflow/providers/amazon/aws/fs/s3.py diff --git a/airflow/providers/amazon/aws/notifications/__init__.py b/providers/src/airflow/providers/amazon/aws/hooks/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/notifications/__init__.py rename to providers/src/airflow/providers/amazon/aws/hooks/__init__.py diff --git a/airflow/providers/amazon/aws/hooks/appflow.py b/providers/src/airflow/providers/amazon/aws/hooks/appflow.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/appflow.py rename to providers/src/airflow/providers/amazon/aws/hooks/appflow.py diff --git a/airflow/providers/amazon/aws/hooks/athena.py b/providers/src/airflow/providers/amazon/aws/hooks/athena.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/athena.py rename to providers/src/airflow/providers/amazon/aws/hooks/athena.py diff --git a/airflow/providers/amazon/aws/hooks/athena_sql.py b/providers/src/airflow/providers/amazon/aws/hooks/athena_sql.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/athena_sql.py rename to providers/src/airflow/providers/amazon/aws/hooks/athena_sql.py diff --git a/airflow/providers/amazon/aws/hooks/base_aws.py b/providers/src/airflow/providers/amazon/aws/hooks/base_aws.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/base_aws.py rename to providers/src/airflow/providers/amazon/aws/hooks/base_aws.py diff --git a/airflow/providers/amazon/aws/hooks/batch_client.py b/providers/src/airflow/providers/amazon/aws/hooks/batch_client.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/batch_client.py rename to providers/src/airflow/providers/amazon/aws/hooks/batch_client.py diff --git a/airflow/providers/amazon/aws/hooks/batch_waiters.json b/providers/src/airflow/providers/amazon/aws/hooks/batch_waiters.json similarity index 100% rename from airflow/providers/amazon/aws/hooks/batch_waiters.json rename to providers/src/airflow/providers/amazon/aws/hooks/batch_waiters.json diff --git a/airflow/providers/amazon/aws/hooks/batch_waiters.py b/providers/src/airflow/providers/amazon/aws/hooks/batch_waiters.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/batch_waiters.py rename to providers/src/airflow/providers/amazon/aws/hooks/batch_waiters.py diff --git a/airflow/providers/amazon/aws/hooks/bedrock.py b/providers/src/airflow/providers/amazon/aws/hooks/bedrock.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/bedrock.py rename to providers/src/airflow/providers/amazon/aws/hooks/bedrock.py diff --git a/airflow/providers/amazon/aws/hooks/chime.py b/providers/src/airflow/providers/amazon/aws/hooks/chime.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/chime.py rename to providers/src/airflow/providers/amazon/aws/hooks/chime.py diff --git a/airflow/providers/amazon/aws/hooks/cloud_formation.py b/providers/src/airflow/providers/amazon/aws/hooks/cloud_formation.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/cloud_formation.py rename to providers/src/airflow/providers/amazon/aws/hooks/cloud_formation.py diff --git a/airflow/providers/amazon/aws/hooks/comprehend.py b/providers/src/airflow/providers/amazon/aws/hooks/comprehend.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/comprehend.py rename to providers/src/airflow/providers/amazon/aws/hooks/comprehend.py diff --git a/airflow/providers/amazon/aws/hooks/datasync.py 
b/providers/src/airflow/providers/amazon/aws/hooks/datasync.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/datasync.py rename to providers/src/airflow/providers/amazon/aws/hooks/datasync.py diff --git a/airflow/providers/amazon/aws/hooks/dms.py b/providers/src/airflow/providers/amazon/aws/hooks/dms.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/dms.py rename to providers/src/airflow/providers/amazon/aws/hooks/dms.py diff --git a/airflow/providers/amazon/aws/hooks/dynamodb.py b/providers/src/airflow/providers/amazon/aws/hooks/dynamodb.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/dynamodb.py rename to providers/src/airflow/providers/amazon/aws/hooks/dynamodb.py diff --git a/airflow/providers/amazon/aws/hooks/ec2.py b/providers/src/airflow/providers/amazon/aws/hooks/ec2.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/ec2.py rename to providers/src/airflow/providers/amazon/aws/hooks/ec2.py diff --git a/airflow/providers/amazon/aws/hooks/ecr.py b/providers/src/airflow/providers/amazon/aws/hooks/ecr.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/ecr.py rename to providers/src/airflow/providers/amazon/aws/hooks/ecr.py diff --git a/airflow/providers/amazon/aws/hooks/ecs.py b/providers/src/airflow/providers/amazon/aws/hooks/ecs.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/ecs.py rename to providers/src/airflow/providers/amazon/aws/hooks/ecs.py diff --git a/airflow/providers/amazon/aws/hooks/eks.py b/providers/src/airflow/providers/amazon/aws/hooks/eks.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/eks.py rename to providers/src/airflow/providers/amazon/aws/hooks/eks.py diff --git a/airflow/providers/amazon/aws/hooks/elasticache_replication_group.py b/providers/src/airflow/providers/amazon/aws/hooks/elasticache_replication_group.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/elasticache_replication_group.py rename to providers/src/airflow/providers/amazon/aws/hooks/elasticache_replication_group.py diff --git a/airflow/providers/amazon/aws/hooks/emr.py b/providers/src/airflow/providers/amazon/aws/hooks/emr.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/emr.py rename to providers/src/airflow/providers/amazon/aws/hooks/emr.py diff --git a/airflow/providers/amazon/aws/hooks/eventbridge.py b/providers/src/airflow/providers/amazon/aws/hooks/eventbridge.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/eventbridge.py rename to providers/src/airflow/providers/amazon/aws/hooks/eventbridge.py diff --git a/airflow/providers/amazon/aws/hooks/glacier.py b/providers/src/airflow/providers/amazon/aws/hooks/glacier.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/glacier.py rename to providers/src/airflow/providers/amazon/aws/hooks/glacier.py diff --git a/airflow/providers/amazon/aws/hooks/glue.py b/providers/src/airflow/providers/amazon/aws/hooks/glue.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/glue.py rename to providers/src/airflow/providers/amazon/aws/hooks/glue.py diff --git a/airflow/providers/amazon/aws/hooks/glue_catalog.py b/providers/src/airflow/providers/amazon/aws/hooks/glue_catalog.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/glue_catalog.py rename to providers/src/airflow/providers/amazon/aws/hooks/glue_catalog.py diff --git a/airflow/providers/amazon/aws/hooks/glue_crawler.py 
b/providers/src/airflow/providers/amazon/aws/hooks/glue_crawler.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/glue_crawler.py rename to providers/src/airflow/providers/amazon/aws/hooks/glue_crawler.py diff --git a/airflow/providers/amazon/aws/hooks/glue_databrew.py b/providers/src/airflow/providers/amazon/aws/hooks/glue_databrew.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/glue_databrew.py rename to providers/src/airflow/providers/amazon/aws/hooks/glue_databrew.py diff --git a/airflow/providers/amazon/aws/hooks/kinesis.py b/providers/src/airflow/providers/amazon/aws/hooks/kinesis.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/kinesis.py rename to providers/src/airflow/providers/amazon/aws/hooks/kinesis.py diff --git a/airflow/providers/amazon/aws/hooks/kinesis_analytics.py b/providers/src/airflow/providers/amazon/aws/hooks/kinesis_analytics.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/kinesis_analytics.py rename to providers/src/airflow/providers/amazon/aws/hooks/kinesis_analytics.py diff --git a/airflow/providers/amazon/aws/hooks/lambda_function.py b/providers/src/airflow/providers/amazon/aws/hooks/lambda_function.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/lambda_function.py rename to providers/src/airflow/providers/amazon/aws/hooks/lambda_function.py diff --git a/airflow/providers/amazon/aws/hooks/logs.py b/providers/src/airflow/providers/amazon/aws/hooks/logs.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/logs.py rename to providers/src/airflow/providers/amazon/aws/hooks/logs.py diff --git a/airflow/providers/amazon/aws/hooks/neptune.py b/providers/src/airflow/providers/amazon/aws/hooks/neptune.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/neptune.py rename to providers/src/airflow/providers/amazon/aws/hooks/neptune.py diff --git a/airflow/providers/amazon/aws/hooks/opensearch_serverless.py b/providers/src/airflow/providers/amazon/aws/hooks/opensearch_serverless.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/opensearch_serverless.py rename to providers/src/airflow/providers/amazon/aws/hooks/opensearch_serverless.py diff --git a/airflow/providers/amazon/aws/hooks/quicksight.py b/providers/src/airflow/providers/amazon/aws/hooks/quicksight.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/quicksight.py rename to providers/src/airflow/providers/amazon/aws/hooks/quicksight.py diff --git a/airflow/providers/amazon/aws/hooks/rds.py b/providers/src/airflow/providers/amazon/aws/hooks/rds.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/rds.py rename to providers/src/airflow/providers/amazon/aws/hooks/rds.py diff --git a/airflow/providers/amazon/aws/hooks/redshift_cluster.py b/providers/src/airflow/providers/amazon/aws/hooks/redshift_cluster.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/redshift_cluster.py rename to providers/src/airflow/providers/amazon/aws/hooks/redshift_cluster.py diff --git a/airflow/providers/amazon/aws/hooks/redshift_data.py b/providers/src/airflow/providers/amazon/aws/hooks/redshift_data.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/redshift_data.py rename to providers/src/airflow/providers/amazon/aws/hooks/redshift_data.py diff --git a/airflow/providers/amazon/aws/hooks/redshift_sql.py b/providers/src/airflow/providers/amazon/aws/hooks/redshift_sql.py similarity index 
100% rename from airflow/providers/amazon/aws/hooks/redshift_sql.py rename to providers/src/airflow/providers/amazon/aws/hooks/redshift_sql.py diff --git a/airflow/providers/amazon/aws/hooks/s3.py b/providers/src/airflow/providers/amazon/aws/hooks/s3.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/s3.py rename to providers/src/airflow/providers/amazon/aws/hooks/s3.py diff --git a/airflow/providers/amazon/aws/hooks/sagemaker.py b/providers/src/airflow/providers/amazon/aws/hooks/sagemaker.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/sagemaker.py rename to providers/src/airflow/providers/amazon/aws/hooks/sagemaker.py diff --git a/airflow/providers/amazon/aws/hooks/secrets_manager.py b/providers/src/airflow/providers/amazon/aws/hooks/secrets_manager.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/secrets_manager.py rename to providers/src/airflow/providers/amazon/aws/hooks/secrets_manager.py diff --git a/airflow/providers/amazon/aws/hooks/ses.py b/providers/src/airflow/providers/amazon/aws/hooks/ses.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/ses.py rename to providers/src/airflow/providers/amazon/aws/hooks/ses.py diff --git a/airflow/providers/amazon/aws/hooks/sns.py b/providers/src/airflow/providers/amazon/aws/hooks/sns.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/sns.py rename to providers/src/airflow/providers/amazon/aws/hooks/sns.py diff --git a/airflow/providers/amazon/aws/hooks/sqs.py b/providers/src/airflow/providers/amazon/aws/hooks/sqs.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/sqs.py rename to providers/src/airflow/providers/amazon/aws/hooks/sqs.py diff --git a/airflow/providers/amazon/aws/hooks/ssm.py b/providers/src/airflow/providers/amazon/aws/hooks/ssm.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/ssm.py rename to providers/src/airflow/providers/amazon/aws/hooks/ssm.py diff --git a/airflow/providers/amazon/aws/hooks/step_function.py b/providers/src/airflow/providers/amazon/aws/hooks/step_function.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/step_function.py rename to providers/src/airflow/providers/amazon/aws/hooks/step_function.py diff --git a/airflow/providers/amazon/aws/hooks/sts.py b/providers/src/airflow/providers/amazon/aws/hooks/sts.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/sts.py rename to providers/src/airflow/providers/amazon/aws/hooks/sts.py diff --git a/airflow/providers/amazon/aws/hooks/verified_permissions.py b/providers/src/airflow/providers/amazon/aws/hooks/verified_permissions.py similarity index 100% rename from airflow/providers/amazon/aws/hooks/verified_permissions.py rename to providers/src/airflow/providers/amazon/aws/hooks/verified_permissions.py diff --git a/airflow/providers/amazon/aws/operators/__init__.py b/providers/src/airflow/providers/amazon/aws/links/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/operators/__init__.py rename to providers/src/airflow/providers/amazon/aws/links/__init__.py diff --git a/airflow/providers/amazon/aws/links/athena.py b/providers/src/airflow/providers/amazon/aws/links/athena.py similarity index 100% rename from airflow/providers/amazon/aws/links/athena.py rename to providers/src/airflow/providers/amazon/aws/links/athena.py diff --git a/airflow/providers/amazon/aws/links/base_aws.py b/providers/src/airflow/providers/amazon/aws/links/base_aws.py similarity 
index 100% rename from airflow/providers/amazon/aws/links/base_aws.py rename to providers/src/airflow/providers/amazon/aws/links/base_aws.py diff --git a/airflow/providers/amazon/aws/links/batch.py b/providers/src/airflow/providers/amazon/aws/links/batch.py similarity index 100% rename from airflow/providers/amazon/aws/links/batch.py rename to providers/src/airflow/providers/amazon/aws/links/batch.py diff --git a/airflow/providers/amazon/aws/links/emr.py b/providers/src/airflow/providers/amazon/aws/links/emr.py similarity index 100% rename from airflow/providers/amazon/aws/links/emr.py rename to providers/src/airflow/providers/amazon/aws/links/emr.py diff --git a/airflow/providers/amazon/aws/links/glue.py b/providers/src/airflow/providers/amazon/aws/links/glue.py similarity index 100% rename from airflow/providers/amazon/aws/links/glue.py rename to providers/src/airflow/providers/amazon/aws/links/glue.py diff --git a/airflow/providers/amazon/aws/links/logs.py b/providers/src/airflow/providers/amazon/aws/links/logs.py similarity index 100% rename from airflow/providers/amazon/aws/links/logs.py rename to providers/src/airflow/providers/amazon/aws/links/logs.py diff --git a/airflow/providers/amazon/aws/links/step_function.py b/providers/src/airflow/providers/amazon/aws/links/step_function.py similarity index 100% rename from airflow/providers/amazon/aws/links/step_function.py rename to providers/src/airflow/providers/amazon/aws/links/step_function.py diff --git a/airflow/providers/amazon/aws/secrets/__init__.py b/providers/src/airflow/providers/amazon/aws/log/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/secrets/__init__.py rename to providers/src/airflow/providers/amazon/aws/log/__init__.py diff --git a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py b/providers/src/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py similarity index 100% rename from airflow/providers/amazon/aws/log/cloudwatch_task_handler.py rename to providers/src/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py diff --git a/airflow/providers/amazon/aws/log/s3_task_handler.py b/providers/src/airflow/providers/amazon/aws/log/s3_task_handler.py similarity index 100% rename from airflow/providers/amazon/aws/log/s3_task_handler.py rename to providers/src/airflow/providers/amazon/aws/log/s3_task_handler.py diff --git a/airflow/providers/amazon/aws/sensors/__init__.py b/providers/src/airflow/providers/amazon/aws/notifications/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/__init__.py rename to providers/src/airflow/providers/amazon/aws/notifications/__init__.py diff --git a/airflow/providers/amazon/aws/notifications/chime.py b/providers/src/airflow/providers/amazon/aws/notifications/chime.py similarity index 100% rename from airflow/providers/amazon/aws/notifications/chime.py rename to providers/src/airflow/providers/amazon/aws/notifications/chime.py diff --git a/airflow/providers/amazon/aws/notifications/sns.py b/providers/src/airflow/providers/amazon/aws/notifications/sns.py similarity index 100% rename from airflow/providers/amazon/aws/notifications/sns.py rename to providers/src/airflow/providers/amazon/aws/notifications/sns.py diff --git a/airflow/providers/amazon/aws/notifications/sqs.py b/providers/src/airflow/providers/amazon/aws/notifications/sqs.py similarity index 100% rename from airflow/providers/amazon/aws/notifications/sqs.py rename to providers/src/airflow/providers/amazon/aws/notifications/sqs.py diff --git 
a/airflow/providers/amazon/aws/transfers/__init__.py b/providers/src/airflow/providers/amazon/aws/operators/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/__init__.py rename to providers/src/airflow/providers/amazon/aws/operators/__init__.py diff --git a/airflow/providers/amazon/aws/operators/appflow.py b/providers/src/airflow/providers/amazon/aws/operators/appflow.py similarity index 100% rename from airflow/providers/amazon/aws/operators/appflow.py rename to providers/src/airflow/providers/amazon/aws/operators/appflow.py diff --git a/airflow/providers/amazon/aws/operators/athena.py b/providers/src/airflow/providers/amazon/aws/operators/athena.py similarity index 100% rename from airflow/providers/amazon/aws/operators/athena.py rename to providers/src/airflow/providers/amazon/aws/operators/athena.py diff --git a/airflow/providers/amazon/aws/operators/base_aws.py b/providers/src/airflow/providers/amazon/aws/operators/base_aws.py similarity index 100% rename from airflow/providers/amazon/aws/operators/base_aws.py rename to providers/src/airflow/providers/amazon/aws/operators/base_aws.py diff --git a/airflow/providers/amazon/aws/operators/batch.py b/providers/src/airflow/providers/amazon/aws/operators/batch.py similarity index 100% rename from airflow/providers/amazon/aws/operators/batch.py rename to providers/src/airflow/providers/amazon/aws/operators/batch.py diff --git a/airflow/providers/amazon/aws/operators/bedrock.py b/providers/src/airflow/providers/amazon/aws/operators/bedrock.py similarity index 100% rename from airflow/providers/amazon/aws/operators/bedrock.py rename to providers/src/airflow/providers/amazon/aws/operators/bedrock.py diff --git a/airflow/providers/amazon/aws/operators/cloud_formation.py b/providers/src/airflow/providers/amazon/aws/operators/cloud_formation.py similarity index 100% rename from airflow/providers/amazon/aws/operators/cloud_formation.py rename to providers/src/airflow/providers/amazon/aws/operators/cloud_formation.py diff --git a/airflow/providers/amazon/aws/operators/comprehend.py b/providers/src/airflow/providers/amazon/aws/operators/comprehend.py similarity index 100% rename from airflow/providers/amazon/aws/operators/comprehend.py rename to providers/src/airflow/providers/amazon/aws/operators/comprehend.py diff --git a/airflow/providers/amazon/aws/operators/datasync.py b/providers/src/airflow/providers/amazon/aws/operators/datasync.py similarity index 100% rename from airflow/providers/amazon/aws/operators/datasync.py rename to providers/src/airflow/providers/amazon/aws/operators/datasync.py diff --git a/airflow/providers/amazon/aws/operators/dms.py b/providers/src/airflow/providers/amazon/aws/operators/dms.py similarity index 100% rename from airflow/providers/amazon/aws/operators/dms.py rename to providers/src/airflow/providers/amazon/aws/operators/dms.py diff --git a/airflow/providers/amazon/aws/operators/ec2.py b/providers/src/airflow/providers/amazon/aws/operators/ec2.py similarity index 100% rename from airflow/providers/amazon/aws/operators/ec2.py rename to providers/src/airflow/providers/amazon/aws/operators/ec2.py diff --git a/airflow/providers/amazon/aws/operators/ecs.py b/providers/src/airflow/providers/amazon/aws/operators/ecs.py similarity index 100% rename from airflow/providers/amazon/aws/operators/ecs.py rename to providers/src/airflow/providers/amazon/aws/operators/ecs.py diff --git a/airflow/providers/amazon/aws/operators/eks.py b/providers/src/airflow/providers/amazon/aws/operators/eks.py 
similarity index 100% rename from airflow/providers/amazon/aws/operators/eks.py rename to providers/src/airflow/providers/amazon/aws/operators/eks.py diff --git a/airflow/providers/amazon/aws/operators/emr.py b/providers/src/airflow/providers/amazon/aws/operators/emr.py similarity index 100% rename from airflow/providers/amazon/aws/operators/emr.py rename to providers/src/airflow/providers/amazon/aws/operators/emr.py diff --git a/airflow/providers/amazon/aws/operators/eventbridge.py b/providers/src/airflow/providers/amazon/aws/operators/eventbridge.py similarity index 100% rename from airflow/providers/amazon/aws/operators/eventbridge.py rename to providers/src/airflow/providers/amazon/aws/operators/eventbridge.py diff --git a/airflow/providers/amazon/aws/operators/glacier.py b/providers/src/airflow/providers/amazon/aws/operators/glacier.py similarity index 100% rename from airflow/providers/amazon/aws/operators/glacier.py rename to providers/src/airflow/providers/amazon/aws/operators/glacier.py diff --git a/airflow/providers/amazon/aws/operators/glue.py b/providers/src/airflow/providers/amazon/aws/operators/glue.py similarity index 100% rename from airflow/providers/amazon/aws/operators/glue.py rename to providers/src/airflow/providers/amazon/aws/operators/glue.py diff --git a/airflow/providers/amazon/aws/operators/glue_crawler.py b/providers/src/airflow/providers/amazon/aws/operators/glue_crawler.py similarity index 100% rename from airflow/providers/amazon/aws/operators/glue_crawler.py rename to providers/src/airflow/providers/amazon/aws/operators/glue_crawler.py diff --git a/airflow/providers/amazon/aws/operators/glue_databrew.py b/providers/src/airflow/providers/amazon/aws/operators/glue_databrew.py similarity index 100% rename from airflow/providers/amazon/aws/operators/glue_databrew.py rename to providers/src/airflow/providers/amazon/aws/operators/glue_databrew.py diff --git a/airflow/providers/amazon/aws/operators/kinesis_analytics.py b/providers/src/airflow/providers/amazon/aws/operators/kinesis_analytics.py similarity index 100% rename from airflow/providers/amazon/aws/operators/kinesis_analytics.py rename to providers/src/airflow/providers/amazon/aws/operators/kinesis_analytics.py diff --git a/airflow/providers/amazon/aws/operators/lambda_function.py b/providers/src/airflow/providers/amazon/aws/operators/lambda_function.py similarity index 100% rename from airflow/providers/amazon/aws/operators/lambda_function.py rename to providers/src/airflow/providers/amazon/aws/operators/lambda_function.py diff --git a/airflow/providers/amazon/aws/operators/neptune.py b/providers/src/airflow/providers/amazon/aws/operators/neptune.py similarity index 100% rename from airflow/providers/amazon/aws/operators/neptune.py rename to providers/src/airflow/providers/amazon/aws/operators/neptune.py diff --git a/airflow/providers/amazon/aws/operators/quicksight.py b/providers/src/airflow/providers/amazon/aws/operators/quicksight.py similarity index 100% rename from airflow/providers/amazon/aws/operators/quicksight.py rename to providers/src/airflow/providers/amazon/aws/operators/quicksight.py diff --git a/airflow/providers/amazon/aws/operators/rds.py b/providers/src/airflow/providers/amazon/aws/operators/rds.py similarity index 100% rename from airflow/providers/amazon/aws/operators/rds.py rename to providers/src/airflow/providers/amazon/aws/operators/rds.py diff --git a/airflow/providers/amazon/aws/operators/redshift_cluster.py b/providers/src/airflow/providers/amazon/aws/operators/redshift_cluster.py 
similarity index 100% rename from airflow/providers/amazon/aws/operators/redshift_cluster.py rename to providers/src/airflow/providers/amazon/aws/operators/redshift_cluster.py diff --git a/airflow/providers/amazon/aws/operators/redshift_data.py b/providers/src/airflow/providers/amazon/aws/operators/redshift_data.py similarity index 100% rename from airflow/providers/amazon/aws/operators/redshift_data.py rename to providers/src/airflow/providers/amazon/aws/operators/redshift_data.py diff --git a/airflow/providers/amazon/aws/operators/s3.py b/providers/src/airflow/providers/amazon/aws/operators/s3.py similarity index 100% rename from airflow/providers/amazon/aws/operators/s3.py rename to providers/src/airflow/providers/amazon/aws/operators/s3.py diff --git a/airflow/providers/amazon/aws/operators/sagemaker.py b/providers/src/airflow/providers/amazon/aws/operators/sagemaker.py similarity index 100% rename from airflow/providers/amazon/aws/operators/sagemaker.py rename to providers/src/airflow/providers/amazon/aws/operators/sagemaker.py diff --git a/airflow/providers/amazon/aws/operators/sns.py b/providers/src/airflow/providers/amazon/aws/operators/sns.py similarity index 100% rename from airflow/providers/amazon/aws/operators/sns.py rename to providers/src/airflow/providers/amazon/aws/operators/sns.py diff --git a/airflow/providers/amazon/aws/operators/sqs.py b/providers/src/airflow/providers/amazon/aws/operators/sqs.py similarity index 100% rename from airflow/providers/amazon/aws/operators/sqs.py rename to providers/src/airflow/providers/amazon/aws/operators/sqs.py diff --git a/airflow/providers/amazon/aws/operators/step_function.py b/providers/src/airflow/providers/amazon/aws/operators/step_function.py similarity index 100% rename from airflow/providers/amazon/aws/operators/step_function.py rename to providers/src/airflow/providers/amazon/aws/operators/step_function.py diff --git a/airflow/providers/amazon/aws/waiters/__init__.py b/providers/src/airflow/providers/amazon/aws/secrets/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/waiters/__init__.py rename to providers/src/airflow/providers/amazon/aws/secrets/__init__.py diff --git a/airflow/providers/amazon/aws/secrets/secrets_manager.py b/providers/src/airflow/providers/amazon/aws/secrets/secrets_manager.py similarity index 100% rename from airflow/providers/amazon/aws/secrets/secrets_manager.py rename to providers/src/airflow/providers/amazon/aws/secrets/secrets_manager.py diff --git a/airflow/providers/amazon/aws/secrets/systems_manager.py b/providers/src/airflow/providers/amazon/aws/secrets/systems_manager.py similarity index 100% rename from airflow/providers/amazon/aws/secrets/systems_manager.py rename to providers/src/airflow/providers/amazon/aws/secrets/systems_manager.py diff --git a/airflow/providers/apache/__init__.py b/providers/src/airflow/providers/amazon/aws/sensors/__init__.py similarity index 100% rename from airflow/providers/apache/__init__.py rename to providers/src/airflow/providers/amazon/aws/sensors/__init__.py diff --git a/airflow/providers/amazon/aws/sensors/athena.py b/providers/src/airflow/providers/amazon/aws/sensors/athena.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/athena.py rename to providers/src/airflow/providers/amazon/aws/sensors/athena.py diff --git a/airflow/providers/amazon/aws/sensors/base_aws.py b/providers/src/airflow/providers/amazon/aws/sensors/base_aws.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/base_aws.py 
rename to providers/src/airflow/providers/amazon/aws/sensors/base_aws.py diff --git a/airflow/providers/amazon/aws/sensors/batch.py b/providers/src/airflow/providers/amazon/aws/sensors/batch.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/batch.py rename to providers/src/airflow/providers/amazon/aws/sensors/batch.py diff --git a/airflow/providers/amazon/aws/sensors/bedrock.py b/providers/src/airflow/providers/amazon/aws/sensors/bedrock.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/bedrock.py rename to providers/src/airflow/providers/amazon/aws/sensors/bedrock.py diff --git a/airflow/providers/amazon/aws/sensors/cloud_formation.py b/providers/src/airflow/providers/amazon/aws/sensors/cloud_formation.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/cloud_formation.py rename to providers/src/airflow/providers/amazon/aws/sensors/cloud_formation.py diff --git a/airflow/providers/amazon/aws/sensors/comprehend.py b/providers/src/airflow/providers/amazon/aws/sensors/comprehend.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/comprehend.py rename to providers/src/airflow/providers/amazon/aws/sensors/comprehend.py diff --git a/airflow/providers/amazon/aws/sensors/dms.py b/providers/src/airflow/providers/amazon/aws/sensors/dms.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/dms.py rename to providers/src/airflow/providers/amazon/aws/sensors/dms.py diff --git a/airflow/providers/amazon/aws/sensors/dynamodb.py b/providers/src/airflow/providers/amazon/aws/sensors/dynamodb.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/dynamodb.py rename to providers/src/airflow/providers/amazon/aws/sensors/dynamodb.py diff --git a/airflow/providers/amazon/aws/sensors/ec2.py b/providers/src/airflow/providers/amazon/aws/sensors/ec2.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/ec2.py rename to providers/src/airflow/providers/amazon/aws/sensors/ec2.py diff --git a/airflow/providers/amazon/aws/sensors/ecs.py b/providers/src/airflow/providers/amazon/aws/sensors/ecs.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/ecs.py rename to providers/src/airflow/providers/amazon/aws/sensors/ecs.py diff --git a/airflow/providers/amazon/aws/sensors/eks.py b/providers/src/airflow/providers/amazon/aws/sensors/eks.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/eks.py rename to providers/src/airflow/providers/amazon/aws/sensors/eks.py diff --git a/airflow/providers/amazon/aws/sensors/emr.py b/providers/src/airflow/providers/amazon/aws/sensors/emr.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/emr.py rename to providers/src/airflow/providers/amazon/aws/sensors/emr.py diff --git a/airflow/providers/amazon/aws/sensors/glacier.py b/providers/src/airflow/providers/amazon/aws/sensors/glacier.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/glacier.py rename to providers/src/airflow/providers/amazon/aws/sensors/glacier.py diff --git a/airflow/providers/amazon/aws/sensors/glue.py b/providers/src/airflow/providers/amazon/aws/sensors/glue.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/glue.py rename to providers/src/airflow/providers/amazon/aws/sensors/glue.py diff --git a/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py b/providers/src/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py similarity index 100% rename 
from airflow/providers/amazon/aws/sensors/glue_catalog_partition.py rename to providers/src/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py diff --git a/airflow/providers/amazon/aws/sensors/glue_crawler.py b/providers/src/airflow/providers/amazon/aws/sensors/glue_crawler.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/glue_crawler.py rename to providers/src/airflow/providers/amazon/aws/sensors/glue_crawler.py diff --git a/airflow/providers/amazon/aws/sensors/kinesis_analytics.py b/providers/src/airflow/providers/amazon/aws/sensors/kinesis_analytics.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/kinesis_analytics.py rename to providers/src/airflow/providers/amazon/aws/sensors/kinesis_analytics.py diff --git a/airflow/providers/amazon/aws/sensors/lambda_function.py b/providers/src/airflow/providers/amazon/aws/sensors/lambda_function.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/lambda_function.py rename to providers/src/airflow/providers/amazon/aws/sensors/lambda_function.py diff --git a/airflow/providers/amazon/aws/sensors/opensearch_serverless.py b/providers/src/airflow/providers/amazon/aws/sensors/opensearch_serverless.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/opensearch_serverless.py rename to providers/src/airflow/providers/amazon/aws/sensors/opensearch_serverless.py diff --git a/airflow/providers/amazon/aws/sensors/quicksight.py b/providers/src/airflow/providers/amazon/aws/sensors/quicksight.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/quicksight.py rename to providers/src/airflow/providers/amazon/aws/sensors/quicksight.py diff --git a/airflow/providers/amazon/aws/sensors/rds.py b/providers/src/airflow/providers/amazon/aws/sensors/rds.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/rds.py rename to providers/src/airflow/providers/amazon/aws/sensors/rds.py diff --git a/airflow/providers/amazon/aws/sensors/redshift_cluster.py b/providers/src/airflow/providers/amazon/aws/sensors/redshift_cluster.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/redshift_cluster.py rename to providers/src/airflow/providers/amazon/aws/sensors/redshift_cluster.py diff --git a/airflow/providers/amazon/aws/sensors/s3.py b/providers/src/airflow/providers/amazon/aws/sensors/s3.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/s3.py rename to providers/src/airflow/providers/amazon/aws/sensors/s3.py diff --git a/airflow/providers/amazon/aws/sensors/sagemaker.py b/providers/src/airflow/providers/amazon/aws/sensors/sagemaker.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/sagemaker.py rename to providers/src/airflow/providers/amazon/aws/sensors/sagemaker.py diff --git a/airflow/providers/amazon/aws/sensors/sqs.py b/providers/src/airflow/providers/amazon/aws/sensors/sqs.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/sqs.py rename to providers/src/airflow/providers/amazon/aws/sensors/sqs.py diff --git a/airflow/providers/amazon/aws/sensors/step_function.py b/providers/src/airflow/providers/amazon/aws/sensors/step_function.py similarity index 100% rename from airflow/providers/amazon/aws/sensors/step_function.py rename to providers/src/airflow/providers/amazon/aws/sensors/step_function.py diff --git a/airflow/providers/apache/beam/triggers/__init__.py b/providers/src/airflow/providers/amazon/aws/transfers/__init__.py similarity index 100% 
rename from airflow/providers/apache/beam/triggers/__init__.py rename to providers/src/airflow/providers/amazon/aws/transfers/__init__.py diff --git a/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/base.py b/providers/src/airflow/providers/amazon/aws/transfers/base.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/base.py rename to providers/src/airflow/providers/amazon/aws/transfers/base.py diff --git a/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/exasol_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/exasol_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/exasol_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/exasol_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/ftp_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/ftp_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/ftp_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/ftp_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/gcs_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/gcs_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/gcs_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/gcs_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py b/providers/src/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/glacier_to_gcs.py rename to providers/src/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py diff --git a/airflow/providers/amazon/aws/transfers/google_api_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/google_api_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/google_api_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/google_api_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py b/providers/src/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py rename to providers/src/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py diff --git a/airflow/providers/amazon/aws/transfers/http_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/http_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/http_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/http_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py 
diff --git a/airflow/providers/amazon/aws/transfers/local_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/local_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/local_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/local_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/mongo_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/mongo_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/mongo_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/mongo_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/redshift_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/redshift_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/redshift_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/redshift_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py b/providers/src/airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py rename to providers/src/airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py diff --git a/airflow/providers/amazon/aws/transfers/s3_to_ftp.py b/providers/src/airflow/providers/amazon/aws/transfers/s3_to_ftp.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/s3_to_ftp.py rename to providers/src/airflow/providers/amazon/aws/transfers/s3_to_ftp.py diff --git a/airflow/providers/amazon/aws/transfers/s3_to_redshift.py b/providers/src/airflow/providers/amazon/aws/transfers/s3_to_redshift.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/s3_to_redshift.py rename to providers/src/airflow/providers/amazon/aws/transfers/s3_to_redshift.py diff --git a/airflow/providers/amazon/aws/transfers/s3_to_sftp.py b/providers/src/airflow/providers/amazon/aws/transfers/s3_to_sftp.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/s3_to_sftp.py rename to providers/src/airflow/providers/amazon/aws/transfers/s3_to_sftp.py diff --git a/airflow/providers/amazon/aws/transfers/s3_to_sql.py b/providers/src/airflow/providers/amazon/aws/transfers/s3_to_sql.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/s3_to_sql.py rename to providers/src/airflow/providers/amazon/aws/transfers/s3_to_sql.py diff --git a/airflow/providers/amazon/aws/transfers/salesforce_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/salesforce_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/salesforce_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/salesforce_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/sftp_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/sftp_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/sftp_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/sftp_to_s3.py diff --git a/airflow/providers/amazon/aws/transfers/sql_to_s3.py b/providers/src/airflow/providers/amazon/aws/transfers/sql_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/transfers/sql_to_s3.py rename to providers/src/airflow/providers/amazon/aws/transfers/sql_to_s3.py diff --git a/airflow/providers/amazon/aws/triggers/README.md b/providers/src/airflow/providers/amazon/aws/triggers/README.md similarity index 100% rename from 
airflow/providers/amazon/aws/triggers/README.md rename to providers/src/airflow/providers/amazon/aws/triggers/README.md diff --git a/airflow/providers/amazon/aws/triggers/__init__.py b/providers/src/airflow/providers/amazon/aws/triggers/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/__init__.py rename to providers/src/airflow/providers/amazon/aws/triggers/__init__.py diff --git a/airflow/providers/amazon/aws/triggers/athena.py b/providers/src/airflow/providers/amazon/aws/triggers/athena.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/athena.py rename to providers/src/airflow/providers/amazon/aws/triggers/athena.py diff --git a/airflow/providers/amazon/aws/triggers/base.py b/providers/src/airflow/providers/amazon/aws/triggers/base.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/base.py rename to providers/src/airflow/providers/amazon/aws/triggers/base.py diff --git a/airflow/providers/amazon/aws/triggers/batch.py b/providers/src/airflow/providers/amazon/aws/triggers/batch.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/batch.py rename to providers/src/airflow/providers/amazon/aws/triggers/batch.py diff --git a/airflow/providers/amazon/aws/triggers/bedrock.py b/providers/src/airflow/providers/amazon/aws/triggers/bedrock.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/bedrock.py rename to providers/src/airflow/providers/amazon/aws/triggers/bedrock.py diff --git a/airflow/providers/amazon/aws/triggers/comprehend.py b/providers/src/airflow/providers/amazon/aws/triggers/comprehend.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/comprehend.py rename to providers/src/airflow/providers/amazon/aws/triggers/comprehend.py diff --git a/airflow/providers/amazon/aws/triggers/ec2.py b/providers/src/airflow/providers/amazon/aws/triggers/ec2.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/ec2.py rename to providers/src/airflow/providers/amazon/aws/triggers/ec2.py diff --git a/airflow/providers/amazon/aws/triggers/ecs.py b/providers/src/airflow/providers/amazon/aws/triggers/ecs.py similarity index 96% rename from airflow/providers/amazon/aws/triggers/ecs.py rename to providers/src/airflow/providers/amazon/aws/triggers/ecs.py index dd86899f22006..11c9cf18043c3 100644
--- a/airflow/providers/amazon/aws/triggers/ecs.py
+++ b/providers/src/airflow/providers/amazon/aws/triggers/ecs.py
@@ -18,7 +18,8 @@
 from __future__ import annotations
 
 import asyncio
-from typing import TYPE_CHECKING, Any, AsyncIterator
+from collections.abc import AsyncIterator
+from typing import TYPE_CHECKING, Any
 
 from botocore.exceptions import ClientError, WaiterError
 
@@ -165,11 +166,10 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
         )
 
     async def run(self) -> AsyncIterator[TriggerEvent]:
-        async with EcsHook(
-            aws_conn_id=self.aws_conn_id, region_name=self.region
-        ).async_conn as ecs_client, AwsLogsHook(
-            aws_conn_id=self.aws_conn_id, region_name=self.region
-        ).async_conn as logs_client:
+        async with (
+            EcsHook(aws_conn_id=self.aws_conn_id, region_name=self.region).async_conn as ecs_client,
+            AwsLogsHook(aws_conn_id=self.aws_conn_id, region_name=self.region).async_conn as logs_client,
+        ):
             waiter = ecs_client.get_waiter("tasks_stopped")
             logs_token = None
             while self.waiter_max_attempts:
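
The ecs.py hunk above carries the only code change in this run of renames: the AsyncIterator ABC now comes from collections.abc rather than typing, and the two awkwardly wrapped context managers collapse into one parenthesized `async with`. A minimal self-contained sketch of the same pattern follows; it assumes Python 3.10+ for the parenthesized form, and `fake_client` is a hypothetical stand-in for the `EcsHook(...).async_conn` / `AwsLogsHook(...).async_conn` managers, not the real hook API:

from __future__ import annotations

import asyncio
import contextlib
from collections.abc import AsyncIterator  # not from typing (deprecated alias)


@contextlib.asynccontextmanager
async def fake_client(name: str) -> AsyncIterator[str]:
    # Hypothetical stand-in for a hook's async connection context manager.
    yield f"{name}-client"


async def watch_task() -> AsyncIterator[str]:
    # Parenthesized multi-manager form: each manager sits on its own line,
    # so the call no longer has to be wrapped awkwardly across `as` clauses.
    async with (
        fake_client("ecs") as ecs_client,
        fake_client("logs") as logs_client,
    ):
        yield f"{ecs_client} polling, streaming via {logs_client}"


async def main() -> None:
    async for event in watch_task():
        print(event)


asyncio.run(main())

diff --git a/airflow/providers/amazon/aws/triggers/eks.py b/providers/src/airflow/providers/amazon/aws/triggers/eks.py similarity index 100% rename from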
airflow/providers/amazon/aws/triggers/eks.py rename to providers/src/airflow/providers/amazon/aws/triggers/eks.py diff --git a/airflow/providers/amazon/aws/triggers/emr.py b/providers/src/airflow/providers/amazon/aws/triggers/emr.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/emr.py rename to providers/src/airflow/providers/amazon/aws/triggers/emr.py diff --git a/airflow/providers/amazon/aws/triggers/glue.py b/providers/src/airflow/providers/amazon/aws/triggers/glue.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/glue.py rename to providers/src/airflow/providers/amazon/aws/triggers/glue.py diff --git a/airflow/providers/amazon/aws/triggers/glue_crawler.py b/providers/src/airflow/providers/amazon/aws/triggers/glue_crawler.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/glue_crawler.py rename to providers/src/airflow/providers/amazon/aws/triggers/glue_crawler.py diff --git a/airflow/providers/amazon/aws/triggers/glue_databrew.py b/providers/src/airflow/providers/amazon/aws/triggers/glue_databrew.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/glue_databrew.py rename to providers/src/airflow/providers/amazon/aws/triggers/glue_databrew.py diff --git a/airflow/providers/amazon/aws/triggers/kinesis_analytics.py b/providers/src/airflow/providers/amazon/aws/triggers/kinesis_analytics.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/kinesis_analytics.py rename to providers/src/airflow/providers/amazon/aws/triggers/kinesis_analytics.py diff --git a/airflow/providers/amazon/aws/triggers/lambda_function.py b/providers/src/airflow/providers/amazon/aws/triggers/lambda_function.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/lambda_function.py rename to providers/src/airflow/providers/amazon/aws/triggers/lambda_function.py diff --git a/airflow/providers/amazon/aws/triggers/neptune.py b/providers/src/airflow/providers/amazon/aws/triggers/neptune.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/neptune.py rename to providers/src/airflow/providers/amazon/aws/triggers/neptune.py diff --git a/airflow/providers/amazon/aws/triggers/opensearch_serverless.py b/providers/src/airflow/providers/amazon/aws/triggers/opensearch_serverless.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/opensearch_serverless.py rename to providers/src/airflow/providers/amazon/aws/triggers/opensearch_serverless.py diff --git a/airflow/providers/amazon/aws/triggers/rds.py b/providers/src/airflow/providers/amazon/aws/triggers/rds.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/rds.py rename to providers/src/airflow/providers/amazon/aws/triggers/rds.py diff --git a/airflow/providers/amazon/aws/triggers/redshift_cluster.py b/providers/src/airflow/providers/amazon/aws/triggers/redshift_cluster.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/redshift_cluster.py rename to providers/src/airflow/providers/amazon/aws/triggers/redshift_cluster.py diff --git a/airflow/providers/amazon/aws/triggers/redshift_data.py b/providers/src/airflow/providers/amazon/aws/triggers/redshift_data.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/redshift_data.py rename to providers/src/airflow/providers/amazon/aws/triggers/redshift_data.py diff --git a/airflow/providers/amazon/aws/triggers/s3.py b/providers/src/airflow/providers/amazon/aws/triggers/s3.py similarity index 100% 
rename from airflow/providers/amazon/aws/triggers/s3.py rename to providers/src/airflow/providers/amazon/aws/triggers/s3.py diff --git a/airflow/providers/amazon/aws/triggers/sagemaker.py b/providers/src/airflow/providers/amazon/aws/triggers/sagemaker.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/sagemaker.py rename to providers/src/airflow/providers/amazon/aws/triggers/sagemaker.py diff --git a/airflow/providers/amazon/aws/triggers/sqs.py b/providers/src/airflow/providers/amazon/aws/triggers/sqs.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/sqs.py rename to providers/src/airflow/providers/amazon/aws/triggers/sqs.py diff --git a/airflow/providers/amazon/aws/triggers/step_function.py b/providers/src/airflow/providers/amazon/aws/triggers/step_function.py similarity index 100% rename from airflow/providers/amazon/aws/triggers/step_function.py rename to providers/src/airflow/providers/amazon/aws/triggers/step_function.py diff --git a/airflow/providers/amazon/aws/utils/__init__.py b/providers/src/airflow/providers/amazon/aws/utils/__init__.py similarity index 100% rename from airflow/providers/amazon/aws/utils/__init__.py rename to providers/src/airflow/providers/amazon/aws/utils/__init__.py diff --git a/airflow/providers/amazon/aws/utils/asset_compat_lineage_collector.py b/providers/src/airflow/providers/amazon/aws/utils/asset_compat_lineage_collector.py similarity index 100% rename from airflow/providers/amazon/aws/utils/asset_compat_lineage_collector.py rename to providers/src/airflow/providers/amazon/aws/utils/asset_compat_lineage_collector.py diff --git a/airflow/providers/amazon/aws/utils/connection_wrapper.py b/providers/src/airflow/providers/amazon/aws/utils/connection_wrapper.py similarity index 100% rename from airflow/providers/amazon/aws/utils/connection_wrapper.py rename to providers/src/airflow/providers/amazon/aws/utils/connection_wrapper.py diff --git a/airflow/providers/amazon/aws/utils/eks_get_token.py b/providers/src/airflow/providers/amazon/aws/utils/eks_get_token.py similarity index 100% rename from airflow/providers/amazon/aws/utils/eks_get_token.py rename to providers/src/airflow/providers/amazon/aws/utils/eks_get_token.py diff --git a/airflow/providers/amazon/aws/utils/emailer.py b/providers/src/airflow/providers/amazon/aws/utils/emailer.py similarity index 100% rename from airflow/providers/amazon/aws/utils/emailer.py rename to providers/src/airflow/providers/amazon/aws/utils/emailer.py diff --git a/airflow/providers/amazon/aws/utils/identifiers.py b/providers/src/airflow/providers/amazon/aws/utils/identifiers.py similarity index 100% rename from airflow/providers/amazon/aws/utils/identifiers.py rename to providers/src/airflow/providers/amazon/aws/utils/identifiers.py diff --git a/airflow/providers/amazon/aws/utils/mixins.py b/providers/src/airflow/providers/amazon/aws/utils/mixins.py similarity index 100% rename from airflow/providers/amazon/aws/utils/mixins.py rename to providers/src/airflow/providers/amazon/aws/utils/mixins.py diff --git a/airflow/providers/amazon/aws/utils/openlineage.py b/providers/src/airflow/providers/amazon/aws/utils/openlineage.py similarity index 100% rename from airflow/providers/amazon/aws/utils/openlineage.py rename to providers/src/airflow/providers/amazon/aws/utils/openlineage.py diff --git a/airflow/providers/amazon/aws/utils/rds.py b/providers/src/airflow/providers/amazon/aws/utils/rds.py similarity index 100% rename from airflow/providers/amazon/aws/utils/rds.py rename to 
providers/src/airflow/providers/amazon/aws/utils/rds.py diff --git a/airflow/providers/amazon/aws/utils/redshift.py b/providers/src/airflow/providers/amazon/aws/utils/redshift.py similarity index 100% rename from airflow/providers/amazon/aws/utils/redshift.py rename to providers/src/airflow/providers/amazon/aws/utils/redshift.py diff --git a/airflow/providers/amazon/aws/utils/sagemaker.py b/providers/src/airflow/providers/amazon/aws/utils/sagemaker.py similarity index 100% rename from airflow/providers/amazon/aws/utils/sagemaker.py rename to providers/src/airflow/providers/amazon/aws/utils/sagemaker.py diff --git a/airflow/providers/amazon/aws/utils/sqs.py b/providers/src/airflow/providers/amazon/aws/utils/sqs.py similarity index 100% rename from airflow/providers/amazon/aws/utils/sqs.py rename to providers/src/airflow/providers/amazon/aws/utils/sqs.py diff --git a/airflow/providers/amazon/aws/utils/suppress.py b/providers/src/airflow/providers/amazon/aws/utils/suppress.py similarity index 100% rename from airflow/providers/amazon/aws/utils/suppress.py rename to providers/src/airflow/providers/amazon/aws/utils/suppress.py diff --git a/airflow/providers/amazon/aws/utils/tags.py b/providers/src/airflow/providers/amazon/aws/utils/tags.py similarity index 100% rename from airflow/providers/amazon/aws/utils/tags.py rename to providers/src/airflow/providers/amazon/aws/utils/tags.py diff --git a/airflow/providers/amazon/aws/utils/task_log_fetcher.py b/providers/src/airflow/providers/amazon/aws/utils/task_log_fetcher.py similarity index 100% rename from airflow/providers/amazon/aws/utils/task_log_fetcher.py rename to providers/src/airflow/providers/amazon/aws/utils/task_log_fetcher.py diff --git a/airflow/providers/amazon/aws/utils/waiter.py b/providers/src/airflow/providers/amazon/aws/utils/waiter.py similarity index 100% rename from airflow/providers/amazon/aws/utils/waiter.py rename to providers/src/airflow/providers/amazon/aws/utils/waiter.py diff --git a/airflow/providers/amazon/aws/utils/waiter_with_logging.py b/providers/src/airflow/providers/amazon/aws/utils/waiter_with_logging.py similarity index 100% rename from airflow/providers/amazon/aws/utils/waiter_with_logging.py rename to providers/src/airflow/providers/amazon/aws/utils/waiter_with_logging.py diff --git a/airflow/providers/amazon/aws/waiters/README.md b/providers/src/airflow/providers/amazon/aws/waiters/README.md similarity index 100% rename from airflow/providers/amazon/aws/waiters/README.md rename to providers/src/airflow/providers/amazon/aws/waiters/README.md diff --git a/airflow/providers/apache/cassandra/hooks/__init__.py b/providers/src/airflow/providers/amazon/aws/waiters/__init__.py similarity index 100% rename from airflow/providers/apache/cassandra/hooks/__init__.py rename to providers/src/airflow/providers/amazon/aws/waiters/__init__.py diff --git a/airflow/providers/amazon/aws/waiters/appflow.json b/providers/src/airflow/providers/amazon/aws/waiters/appflow.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/appflow.json rename to providers/src/airflow/providers/amazon/aws/waiters/appflow.json diff --git a/airflow/providers/amazon/aws/waiters/athena.json b/providers/src/airflow/providers/amazon/aws/waiters/athena.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/athena.json rename to providers/src/airflow/providers/amazon/aws/waiters/athena.json diff --git a/airflow/providers/amazon/aws/waiters/base_waiter.py 
b/providers/src/airflow/providers/amazon/aws/waiters/base_waiter.py similarity index 100% rename from airflow/providers/amazon/aws/waiters/base_waiter.py rename to providers/src/airflow/providers/amazon/aws/waiters/base_waiter.py diff --git a/airflow/providers/amazon/aws/waiters/batch.json b/providers/src/airflow/providers/amazon/aws/waiters/batch.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/batch.json rename to providers/src/airflow/providers/amazon/aws/waiters/batch.json diff --git a/airflow/providers/amazon/aws/waiters/bedrock-agent.json b/providers/src/airflow/providers/amazon/aws/waiters/bedrock-agent.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/bedrock-agent.json rename to providers/src/airflow/providers/amazon/aws/waiters/bedrock-agent.json diff --git a/airflow/providers/amazon/aws/waiters/bedrock.json b/providers/src/airflow/providers/amazon/aws/waiters/bedrock.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/bedrock.json rename to providers/src/airflow/providers/amazon/aws/waiters/bedrock.json diff --git a/airflow/providers/amazon/aws/waiters/comprehend.json b/providers/src/airflow/providers/amazon/aws/waiters/comprehend.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/comprehend.json rename to providers/src/airflow/providers/amazon/aws/waiters/comprehend.json diff --git a/airflow/providers/amazon/aws/waiters/databrew.json b/providers/src/airflow/providers/amazon/aws/waiters/databrew.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/databrew.json rename to providers/src/airflow/providers/amazon/aws/waiters/databrew.json diff --git a/airflow/providers/amazon/aws/waiters/dynamodb.json b/providers/src/airflow/providers/amazon/aws/waiters/dynamodb.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/dynamodb.json rename to providers/src/airflow/providers/amazon/aws/waiters/dynamodb.json diff --git a/airflow/providers/amazon/aws/waiters/ecs.json b/providers/src/airflow/providers/amazon/aws/waiters/ecs.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/ecs.json rename to providers/src/airflow/providers/amazon/aws/waiters/ecs.json diff --git a/airflow/providers/amazon/aws/waiters/eks.json b/providers/src/airflow/providers/amazon/aws/waiters/eks.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/eks.json rename to providers/src/airflow/providers/amazon/aws/waiters/eks.json diff --git a/airflow/providers/amazon/aws/waiters/emr-containers.json b/providers/src/airflow/providers/amazon/aws/waiters/emr-containers.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/emr-containers.json rename to providers/src/airflow/providers/amazon/aws/waiters/emr-containers.json diff --git a/airflow/providers/amazon/aws/waiters/emr-serverless.json b/providers/src/airflow/providers/amazon/aws/waiters/emr-serverless.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/emr-serverless.json rename to providers/src/airflow/providers/amazon/aws/waiters/emr-serverless.json diff --git a/airflow/providers/amazon/aws/waiters/emr.json b/providers/src/airflow/providers/amazon/aws/waiters/emr.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/emr.json rename to providers/src/airflow/providers/amazon/aws/waiters/emr.json diff --git a/airflow/providers/amazon/aws/waiters/glue.json b/providers/src/airflow/providers/amazon/aws/waiters/glue.json 
similarity index 100% rename from airflow/providers/amazon/aws/waiters/glue.json rename to providers/src/airflow/providers/amazon/aws/waiters/glue.json diff --git a/airflow/providers/amazon/aws/waiters/kinesisanalyticsv2.json b/providers/src/airflow/providers/amazon/aws/waiters/kinesisanalyticsv2.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/kinesisanalyticsv2.json rename to providers/src/airflow/providers/amazon/aws/waiters/kinesisanalyticsv2.json diff --git a/airflow/providers/amazon/aws/waiters/neptune.json b/providers/src/airflow/providers/amazon/aws/waiters/neptune.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/neptune.json rename to providers/src/airflow/providers/amazon/aws/waiters/neptune.json diff --git a/airflow/providers/amazon/aws/waiters/opensearchserverless.json b/providers/src/airflow/providers/amazon/aws/waiters/opensearchserverless.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/opensearchserverless.json rename to providers/src/airflow/providers/amazon/aws/waiters/opensearchserverless.json diff --git a/airflow/providers/amazon/aws/waiters/rds.json b/providers/src/airflow/providers/amazon/aws/waiters/rds.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/rds.json rename to providers/src/airflow/providers/amazon/aws/waiters/rds.json diff --git a/airflow/providers/amazon/aws/waiters/redshift.json b/providers/src/airflow/providers/amazon/aws/waiters/redshift.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/redshift.json rename to providers/src/airflow/providers/amazon/aws/waiters/redshift.json diff --git a/airflow/providers/amazon/aws/waiters/sagemaker.json b/providers/src/airflow/providers/amazon/aws/waiters/sagemaker.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/sagemaker.json rename to providers/src/airflow/providers/amazon/aws/waiters/sagemaker.json diff --git a/airflow/providers/amazon/aws/waiters/stepfunctions.json b/providers/src/airflow/providers/amazon/aws/waiters/stepfunctions.json similarity index 100% rename from airflow/providers/amazon/aws/waiters/stepfunctions.json rename to providers/src/airflow/providers/amazon/aws/waiters/stepfunctions.json diff --git a/airflow/providers/amazon/provider.yaml b/providers/src/airflow/providers/amazon/provider.yaml similarity index 100% rename from airflow/providers/amazon/provider.yaml rename to providers/src/airflow/providers/amazon/provider.yaml diff --git a/airflow/providers/apache/beam/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/beam/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/apache/beam/.latest-doc-only-change.txt rename to providers/src/airflow/providers/apache/beam/.latest-doc-only-change.txt diff --git a/airflow/providers/apache/beam/CHANGELOG.rst b/providers/src/airflow/providers/apache/beam/CHANGELOG.rst similarity index 100% rename from airflow/providers/apache/beam/CHANGELOG.rst rename to providers/src/airflow/providers/apache/beam/CHANGELOG.rst diff --git a/airflow/providers/apache/beam/README.md b/providers/src/airflow/providers/apache/beam/README.md similarity index 100% rename from airflow/providers/apache/beam/README.md rename to providers/src/airflow/providers/apache/beam/README.md diff --git a/airflow/providers/apache/beam/__init__.py b/providers/src/airflow/providers/apache/beam/__init__.py similarity index 100% rename from airflow/providers/apache/beam/__init__.py rename to 
providers/src/airflow/providers/apache/beam/__init__.py diff --git a/airflow/providers/apache/beam/hooks/__init__.py b/providers/src/airflow/providers/apache/beam/hooks/__init__.py similarity index 100% rename from airflow/providers/apache/beam/hooks/__init__.py rename to providers/src/airflow/providers/apache/beam/hooks/__init__.py diff --git a/airflow/providers/apache/beam/hooks/beam.py b/providers/src/airflow/providers/apache/beam/hooks/beam.py similarity index 100% rename from airflow/providers/apache/beam/hooks/beam.py rename to providers/src/airflow/providers/apache/beam/hooks/beam.py diff --git a/airflow/providers/apache/beam/operators/__init__.py b/providers/src/airflow/providers/apache/beam/operators/__init__.py similarity index 100% rename from airflow/providers/apache/beam/operators/__init__.py rename to providers/src/airflow/providers/apache/beam/operators/__init__.py diff --git a/airflow/providers/apache/beam/operators/beam.py b/providers/src/airflow/providers/apache/beam/operators/beam.py similarity index 100% rename from airflow/providers/apache/beam/operators/beam.py rename to providers/src/airflow/providers/apache/beam/operators/beam.py diff --git a/airflow/providers/apache/beam/provider.yaml b/providers/src/airflow/providers/apache/beam/provider.yaml similarity index 100% rename from airflow/providers/apache/beam/provider.yaml rename to providers/src/airflow/providers/apache/beam/provider.yaml diff --git a/airflow/providers/apache/cassandra/sensors/__init__.py b/providers/src/airflow/providers/apache/beam/triggers/__init__.py similarity index 100% rename from airflow/providers/apache/cassandra/sensors/__init__.py rename to providers/src/airflow/providers/apache/beam/triggers/__init__.py diff --git a/airflow/providers/apache/beam/triggers/beam.py b/providers/src/airflow/providers/apache/beam/triggers/beam.py similarity index 100% rename from airflow/providers/apache/beam/triggers/beam.py rename to providers/src/airflow/providers/apache/beam/triggers/beam.py diff --git a/airflow/providers/apache/cassandra/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/cassandra/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/apache/cassandra/.latest-doc-only-change.txt rename to providers/src/airflow/providers/apache/cassandra/.latest-doc-only-change.txt diff --git a/airflow/providers/apache/cassandra/CHANGELOG.rst b/providers/src/airflow/providers/apache/cassandra/CHANGELOG.rst similarity index 100% rename from airflow/providers/apache/cassandra/CHANGELOG.rst rename to providers/src/airflow/providers/apache/cassandra/CHANGELOG.rst diff --git a/airflow/providers/apache/cassandra/__init__.py b/providers/src/airflow/providers/apache/cassandra/__init__.py similarity index 100% rename from airflow/providers/apache/cassandra/__init__.py rename to providers/src/airflow/providers/apache/cassandra/__init__.py diff --git a/airflow/providers/apache/druid/transfers/__init__.py b/providers/src/airflow/providers/apache/cassandra/hooks/__init__.py similarity index 100% rename from airflow/providers/apache/druid/transfers/__init__.py rename to providers/src/airflow/providers/apache/cassandra/hooks/__init__.py diff --git a/airflow/providers/apache/cassandra/hooks/cassandra.py b/providers/src/airflow/providers/apache/cassandra/hooks/cassandra.py similarity index 100% rename from airflow/providers/apache/cassandra/hooks/cassandra.py rename to providers/src/airflow/providers/apache/cassandra/hooks/cassandra.py diff --git 
a/airflow/providers/apache/cassandra/provider.yaml b/providers/src/airflow/providers/apache/cassandra/provider.yaml similarity index 100% rename from airflow/providers/apache/cassandra/provider.yaml rename to providers/src/airflow/providers/apache/cassandra/provider.yaml diff --git a/airflow/providers/apache/hdfs/log/__init__.py b/providers/src/airflow/providers/apache/cassandra/sensors/__init__.py similarity index 100% rename from airflow/providers/apache/hdfs/log/__init__.py rename to providers/src/airflow/providers/apache/cassandra/sensors/__init__.py diff --git a/airflow/providers/apache/cassandra/sensors/record.py b/providers/src/airflow/providers/apache/cassandra/sensors/record.py similarity index 100% rename from airflow/providers/apache/cassandra/sensors/record.py rename to providers/src/airflow/providers/apache/cassandra/sensors/record.py diff --git a/airflow/providers/apache/cassandra/sensors/table.py b/providers/src/airflow/providers/apache/cassandra/sensors/table.py similarity index 100% rename from airflow/providers/apache/cassandra/sensors/table.py rename to providers/src/airflow/providers/apache/cassandra/sensors/table.py diff --git a/airflow/providers/apache/drill/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/drill/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/apache/drill/.latest-doc-only-change.txt rename to providers/src/airflow/providers/apache/drill/.latest-doc-only-change.txt diff --git a/airflow/providers/apache/drill/CHANGELOG.rst b/providers/src/airflow/providers/apache/drill/CHANGELOG.rst similarity index 100% rename from airflow/providers/apache/drill/CHANGELOG.rst rename to providers/src/airflow/providers/apache/drill/CHANGELOG.rst diff --git a/airflow/providers/apache/drill/__init__.py b/providers/src/airflow/providers/apache/drill/__init__.py similarity index 100% rename from airflow/providers/apache/drill/__init__.py rename to providers/src/airflow/providers/apache/drill/__init__.py diff --git a/airflow/providers/apache/drill/hooks/__init__.py b/providers/src/airflow/providers/apache/drill/hooks/__init__.py similarity index 100% rename from airflow/providers/apache/drill/hooks/__init__.py rename to providers/src/airflow/providers/apache/drill/hooks/__init__.py diff --git a/airflow/providers/apache/drill/hooks/drill.py b/providers/src/airflow/providers/apache/drill/hooks/drill.py similarity index 100% rename from airflow/providers/apache/drill/hooks/drill.py rename to providers/src/airflow/providers/apache/drill/hooks/drill.py diff --git a/airflow/providers/apache/drill/operators/__init__.py b/providers/src/airflow/providers/apache/drill/operators/__init__.py similarity index 100% rename from airflow/providers/apache/drill/operators/__init__.py rename to providers/src/airflow/providers/apache/drill/operators/__init__.py diff --git a/airflow/providers/apache/drill/operators/drill.py b/providers/src/airflow/providers/apache/drill/operators/drill.py similarity index 100% rename from airflow/providers/apache/drill/operators/drill.py rename to providers/src/airflow/providers/apache/drill/operators/drill.py diff --git a/airflow/providers/apache/drill/provider.yaml b/providers/src/airflow/providers/apache/drill/provider.yaml similarity index 100% rename from airflow/providers/apache/drill/provider.yaml rename to providers/src/airflow/providers/apache/drill/provider.yaml diff --git a/airflow/providers/apache/druid/.latest-doc-only-change.txt 
b/providers/src/airflow/providers/apache/druid/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/apache/druid/.latest-doc-only-change.txt rename to providers/src/airflow/providers/apache/druid/.latest-doc-only-change.txt diff --git a/airflow/providers/apache/druid/CHANGELOG.rst b/providers/src/airflow/providers/apache/druid/CHANGELOG.rst similarity index 100% rename from airflow/providers/apache/druid/CHANGELOG.rst rename to providers/src/airflow/providers/apache/druid/CHANGELOG.rst diff --git a/airflow/providers/apache/druid/__init__.py b/providers/src/airflow/providers/apache/druid/__init__.py similarity index 100% rename from airflow/providers/apache/druid/__init__.py rename to providers/src/airflow/providers/apache/druid/__init__.py diff --git a/airflow/providers/apache/druid/hooks/__init__.py b/providers/src/airflow/providers/apache/druid/hooks/__init__.py similarity index 100% rename from airflow/providers/apache/druid/hooks/__init__.py rename to providers/src/airflow/providers/apache/druid/hooks/__init__.py diff --git a/airflow/providers/apache/druid/hooks/druid.py b/providers/src/airflow/providers/apache/druid/hooks/druid.py similarity index 100% rename from airflow/providers/apache/druid/hooks/druid.py rename to providers/src/airflow/providers/apache/druid/hooks/druid.py diff --git a/airflow/providers/apache/druid/operators/__init__.py b/providers/src/airflow/providers/apache/druid/operators/__init__.py similarity index 100% rename from airflow/providers/apache/druid/operators/__init__.py rename to providers/src/airflow/providers/apache/druid/operators/__init__.py diff --git a/airflow/providers/apache/druid/operators/druid.py b/providers/src/airflow/providers/apache/druid/operators/druid.py similarity index 100% rename from airflow/providers/apache/druid/operators/druid.py rename to providers/src/airflow/providers/apache/druid/operators/druid.py diff --git a/airflow/providers/apache/druid/operators/druid_check.py b/providers/src/airflow/providers/apache/druid/operators/druid_check.py similarity index 100% rename from airflow/providers/apache/druid/operators/druid_check.py rename to providers/src/airflow/providers/apache/druid/operators/druid_check.py diff --git a/airflow/providers/apache/druid/provider.yaml b/providers/src/airflow/providers/apache/druid/provider.yaml similarity index 100% rename from airflow/providers/apache/druid/provider.yaml rename to providers/src/airflow/providers/apache/druid/provider.yaml diff --git a/airflow/providers/apache/hive/transfers/__init__.py b/providers/src/airflow/providers/apache/druid/transfers/__init__.py similarity index 100% rename from airflow/providers/apache/hive/transfers/__init__.py rename to providers/src/airflow/providers/apache/druid/transfers/__init__.py diff --git a/airflow/providers/apache/druid/transfers/hive_to_druid.py b/providers/src/airflow/providers/apache/druid/transfers/hive_to_druid.py similarity index 100% rename from airflow/providers/apache/druid/transfers/hive_to_druid.py rename to providers/src/airflow/providers/apache/druid/transfers/hive_to_druid.py diff --git a/airflow/providers/apache/flink/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/flink/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/apache/flink/.latest-doc-only-change.txt rename to providers/src/airflow/providers/apache/flink/.latest-doc-only-change.txt diff --git a/airflow/providers/apache/flink/CHANGELOG.rst b/providers/src/airflow/providers/apache/flink/CHANGELOG.rst 
similarity index 100% rename from airflow/providers/apache/flink/CHANGELOG.rst rename to providers/src/airflow/providers/apache/flink/CHANGELOG.rst diff --git a/airflow/providers/apache/flink/__init__.py b/providers/src/airflow/providers/apache/flink/__init__.py similarity index 100% rename from airflow/providers/apache/flink/__init__.py rename to providers/src/airflow/providers/apache/flink/__init__.py diff --git a/airflow/providers/apache/flink/hooks/__init__.py b/providers/src/airflow/providers/apache/flink/hooks/__init__.py similarity index 100% rename from airflow/providers/apache/flink/hooks/__init__.py rename to providers/src/airflow/providers/apache/flink/hooks/__init__.py diff --git a/airflow/providers/apache/flink/operators/__init__.py b/providers/src/airflow/providers/apache/flink/operators/__init__.py similarity index 100% rename from airflow/providers/apache/flink/operators/__init__.py rename to providers/src/airflow/providers/apache/flink/operators/__init__.py diff --git a/airflow/providers/apache/flink/operators/flink_kubernetes.py b/providers/src/airflow/providers/apache/flink/operators/flink_kubernetes.py similarity index 100% rename from airflow/providers/apache/flink/operators/flink_kubernetes.py rename to providers/src/airflow/providers/apache/flink/operators/flink_kubernetes.py diff --git a/airflow/providers/apache/flink/provider.yaml b/providers/src/airflow/providers/apache/flink/provider.yaml similarity index 100% rename from airflow/providers/apache/flink/provider.yaml rename to providers/src/airflow/providers/apache/flink/provider.yaml diff --git a/airflow/providers/apache/flink/sensors/__init__.py b/providers/src/airflow/providers/apache/flink/sensors/__init__.py similarity index 100% rename from airflow/providers/apache/flink/sensors/__init__.py rename to providers/src/airflow/providers/apache/flink/sensors/__init__.py diff --git a/airflow/providers/apache/flink/sensors/flink_kubernetes.py b/providers/src/airflow/providers/apache/flink/sensors/flink_kubernetes.py similarity index 100% rename from airflow/providers/apache/flink/sensors/flink_kubernetes.py rename to providers/src/airflow/providers/apache/flink/sensors/flink_kubernetes.py diff --git a/airflow/providers/apache/hdfs/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/hdfs/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/apache/hdfs/.latest-doc-only-change.txt rename to providers/src/airflow/providers/apache/hdfs/.latest-doc-only-change.txt diff --git a/airflow/providers/apache/hdfs/CHANGELOG.rst b/providers/src/airflow/providers/apache/hdfs/CHANGELOG.rst similarity index 100% rename from airflow/providers/apache/hdfs/CHANGELOG.rst rename to providers/src/airflow/providers/apache/hdfs/CHANGELOG.rst diff --git a/airflow/providers/apache/hdfs/__init__.py b/providers/src/airflow/providers/apache/hdfs/__init__.py similarity index 100% rename from airflow/providers/apache/hdfs/__init__.py rename to providers/src/airflow/providers/apache/hdfs/__init__.py diff --git a/airflow/providers/apache/hdfs/hooks/__init__.py b/providers/src/airflow/providers/apache/hdfs/hooks/__init__.py similarity index 100% rename from airflow/providers/apache/hdfs/hooks/__init__.py rename to providers/src/airflow/providers/apache/hdfs/hooks/__init__.py diff --git a/airflow/providers/apache/hdfs/hooks/hdfs.py b/providers/src/airflow/providers/apache/hdfs/hooks/hdfs.py similarity index 100% rename from airflow/providers/apache/hdfs/hooks/hdfs.py rename to 
diff --git a/airflow/providers/apache/hdfs/hooks/webhdfs.py b/providers/src/airflow/providers/apache/hdfs/hooks/webhdfs.py
similarity index 100%
rename from airflow/providers/apache/hdfs/hooks/webhdfs.py
rename to providers/src/airflow/providers/apache/hdfs/hooks/webhdfs.py
diff --git a/airflow/providers/apache/kafka/hooks/__init__.py b/providers/src/airflow/providers/apache/hdfs/log/__init__.py
similarity index 100%
rename from airflow/providers/apache/kafka/hooks/__init__.py
rename to providers/src/airflow/providers/apache/hdfs/log/__init__.py
diff --git a/airflow/providers/apache/hdfs/log/hdfs_task_handler.py b/providers/src/airflow/providers/apache/hdfs/log/hdfs_task_handler.py
similarity index 100%
rename from airflow/providers/apache/hdfs/log/hdfs_task_handler.py
rename to providers/src/airflow/providers/apache/hdfs/log/hdfs_task_handler.py
diff --git a/airflow/providers/apache/hdfs/provider.yaml b/providers/src/airflow/providers/apache/hdfs/provider.yaml
similarity index 100%
rename from airflow/providers/apache/hdfs/provider.yaml
rename to providers/src/airflow/providers/apache/hdfs/provider.yaml
diff --git a/airflow/providers/apache/hdfs/sensors/__init__.py b/providers/src/airflow/providers/apache/hdfs/sensors/__init__.py
similarity index 100%
rename from airflow/providers/apache/hdfs/sensors/__init__.py
rename to providers/src/airflow/providers/apache/hdfs/sensors/__init__.py
diff --git a/airflow/providers/apache/hdfs/sensors/hdfs.py b/providers/src/airflow/providers/apache/hdfs/sensors/hdfs.py
similarity index 100%
rename from airflow/providers/apache/hdfs/sensors/hdfs.py
rename to providers/src/airflow/providers/apache/hdfs/sensors/hdfs.py
diff --git a/airflow/providers/apache/hdfs/sensors/web_hdfs.py b/providers/src/airflow/providers/apache/hdfs/sensors/web_hdfs.py
similarity index 100%
rename from airflow/providers/apache/hdfs/sensors/web_hdfs.py
rename to providers/src/airflow/providers/apache/hdfs/sensors/web_hdfs.py
diff --git a/airflow/providers/apache/hive/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/hive/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/apache/hive/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/apache/hive/.latest-doc-only-change.txt
diff --git a/airflow/providers/apache/hive/CHANGELOG.rst b/providers/src/airflow/providers/apache/hive/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/apache/hive/CHANGELOG.rst
rename to providers/src/airflow/providers/apache/hive/CHANGELOG.rst
diff --git a/airflow/providers/apache/hive/__init__.py b/providers/src/airflow/providers/apache/hive/__init__.py
similarity index 100%
rename from airflow/providers/apache/hive/__init__.py
rename to providers/src/airflow/providers/apache/hive/__init__.py
diff --git a/airflow/providers/apache/hive/hooks/__init__.py b/providers/src/airflow/providers/apache/hive/hooks/__init__.py
similarity index 100%
rename from airflow/providers/apache/hive/hooks/__init__.py
rename to providers/src/airflow/providers/apache/hive/hooks/__init__.py
diff --git a/airflow/providers/apache/hive/hooks/hive.py b/providers/src/airflow/providers/apache/hive/hooks/hive.py
similarity index 99%
rename from airflow/providers/apache/hive/hooks/hive.py
rename to providers/src/airflow/providers/apache/hive/hooks/hive.py
index e08c4d6128d81..d768743cd459b 100644
--- a/airflow/providers/apache/hive/hooks/hive.py
+++ b/providers/src/airflow/providers/apache/hive/hooks/hive.py
@@ -23,8 +23,9 @@
 import socket
 import subprocess
 import time
+from collections.abc import Iterable, Mapping
 from tempfile import NamedTemporaryFile, TemporaryDirectory
-from typing import TYPE_CHECKING, Any, Iterable, Mapping
+from typing import TYPE_CHECKING, Any
 
 if TYPE_CHECKING:
     import pandas as pd
@@ -411,9 +412,10 @@ def _infer_field_types_from_df(df: pd.DataFrame) -> dict[Any, Any]:
         if pandas_kwargs is None:
             pandas_kwargs = {}
 
-        with TemporaryDirectory(prefix="airflow_hiveop_") as tmp_dir, NamedTemporaryFile(
-            dir=tmp_dir, mode="w"
-        ) as f:
+        with (
+            TemporaryDirectory(prefix="airflow_hiveop_") as tmp_dir,
+            NamedTemporaryFile(dir=tmp_dir, mode="w") as f,
+        ):
             if field_dict is None:
                 field_dict = _infer_field_types_from_df(df)
 
diff --git a/airflow/providers/apache/hive/macros/__init__.py b/providers/src/airflow/providers/apache/hive/macros/__init__.py
similarity index 100%
rename from airflow/providers/apache/hive/macros/__init__.py
rename to providers/src/airflow/providers/apache/hive/macros/__init__.py
diff --git a/airflow/providers/apache/hive/macros/hive.py b/providers/src/airflow/providers/apache/hive/macros/hive.py
similarity index 100%
rename from airflow/providers/apache/hive/macros/hive.py
rename to providers/src/airflow/providers/apache/hive/macros/hive.py
diff --git a/airflow/providers/apache/hive/operators/__init__.py b/providers/src/airflow/providers/apache/hive/operators/__init__.py
similarity index 100%
rename from airflow/providers/apache/hive/operators/__init__.py
rename to providers/src/airflow/providers/apache/hive/operators/__init__.py
diff --git a/airflow/providers/apache/hive/operators/hive.py b/providers/src/airflow/providers/apache/hive/operators/hive.py
similarity index 100%
rename from airflow/providers/apache/hive/operators/hive.py
rename to providers/src/airflow/providers/apache/hive/operators/hive.py
diff --git a/airflow/providers/apache/hive/operators/hive_stats.py b/providers/src/airflow/providers/apache/hive/operators/hive_stats.py
similarity index 100%
rename from airflow/providers/apache/hive/operators/hive_stats.py
rename to providers/src/airflow/providers/apache/hive/operators/hive_stats.py
diff --git a/airflow/providers/apache/hive/plugins/__init__.py b/providers/src/airflow/providers/apache/hive/plugins/__init__.py
similarity index 100%
rename from airflow/providers/apache/hive/plugins/__init__.py
rename to providers/src/airflow/providers/apache/hive/plugins/__init__.py
diff --git a/airflow/providers/apache/hive/plugins/hive.py b/providers/src/airflow/providers/apache/hive/plugins/hive.py
similarity index 100%
rename from airflow/providers/apache/hive/plugins/hive.py
rename to providers/src/airflow/providers/apache/hive/plugins/hive.py
diff --git a/airflow/providers/apache/hive/provider.yaml b/providers/src/airflow/providers/apache/hive/provider.yaml
similarity index 100%
rename from airflow/providers/apache/hive/provider.yaml
rename to providers/src/airflow/providers/apache/hive/provider.yaml
diff --git a/airflow/providers/apache/hive/sensors/__init__.py b/providers/src/airflow/providers/apache/hive/sensors/__init__.py
similarity index 100%
rename from airflow/providers/apache/hive/sensors/__init__.py
rename to providers/src/airflow/providers/apache/hive/sensors/__init__.py
diff --git a/airflow/providers/apache/hive/sensors/hive_partition.py b/providers/src/airflow/providers/apache/hive/sensors/hive_partition.py
similarity index 100%
rename from airflow/providers/apache/hive/sensors/hive_partition.py
rename to providers/src/airflow/providers/apache/hive/sensors/hive_partition.py
diff --git a/airflow/providers/apache/hive/sensors/metastore_partition.py b/providers/src/airflow/providers/apache/hive/sensors/metastore_partition.py
similarity index 100%
rename from airflow/providers/apache/hive/sensors/metastore_partition.py
rename to providers/src/airflow/providers/apache/hive/sensors/metastore_partition.py
diff --git a/airflow/providers/apache/hive/sensors/named_hive_partition.py b/providers/src/airflow/providers/apache/hive/sensors/named_hive_partition.py
similarity index 100%
rename from airflow/providers/apache/hive/sensors/named_hive_partition.py
rename to providers/src/airflow/providers/apache/hive/sensors/named_hive_partition.py
diff --git a/airflow/providers/apache/kafka/operators/__init__.py b/providers/src/airflow/providers/apache/hive/transfers/__init__.py
similarity index 100%
rename from airflow/providers/apache/kafka/operators/__init__.py
rename to providers/src/airflow/providers/apache/hive/transfers/__init__.py
diff --git a/airflow/providers/apache/hive/transfers/hive_to_mysql.py b/providers/src/airflow/providers/apache/hive/transfers/hive_to_mysql.py
similarity index 100%
rename from airflow/providers/apache/hive/transfers/hive_to_mysql.py
rename to providers/src/airflow/providers/apache/hive/transfers/hive_to_mysql.py
diff --git a/airflow/providers/apache/hive/transfers/hive_to_samba.py b/providers/src/airflow/providers/apache/hive/transfers/hive_to_samba.py
similarity index 100%
rename from airflow/providers/apache/hive/transfers/hive_to_samba.py
rename to providers/src/airflow/providers/apache/hive/transfers/hive_to_samba.py
diff --git a/airflow/providers/apache/hive/transfers/mssql_to_hive.py b/providers/src/airflow/providers/apache/hive/transfers/mssql_to_hive.py
similarity index 100%
rename from airflow/providers/apache/hive/transfers/mssql_to_hive.py
rename to providers/src/airflow/providers/apache/hive/transfers/mssql_to_hive.py
diff --git a/airflow/providers/apache/hive/transfers/mysql_to_hive.py b/providers/src/airflow/providers/apache/hive/transfers/mysql_to_hive.py
similarity index 100%
rename from airflow/providers/apache/hive/transfers/mysql_to_hive.py
rename to providers/src/airflow/providers/apache/hive/transfers/mysql_to_hive.py
diff --git a/airflow/providers/apache/hive/transfers/s3_to_hive.py b/providers/src/airflow/providers/apache/hive/transfers/s3_to_hive.py
similarity index 97%
rename from airflow/providers/apache/hive/transfers/s3_to_hive.py
rename to providers/src/airflow/providers/apache/hive/transfers/s3_to_hive.py
index ebc9536897018..6285103d370bd 100644
--- a/airflow/providers/apache/hive/transfers/s3_to_hive.py
+++ b/providers/src/airflow/providers/apache/hive/transfers/s3_to_hive.py
@@ -23,8 +23,9 @@
 import gzip
 import os
 import tempfile
+from collections.abc import Sequence
 from tempfile import NamedTemporaryFile, TemporaryDirectory
-from typing import TYPE_CHECKING, Any, Sequence
+from typing import TYPE_CHECKING, Any
 
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
@@ -161,9 +162,10 @@ def execute(self, context: Context):
         if self.select_expression and self.input_compressed and file_ext.lower() != ".gz":
             raise AirflowException("GZIP is the only compression format Amazon S3 Select supports")
 
-        with TemporaryDirectory(prefix="tmps32hive_") as tmp_dir, NamedTemporaryFile(
-            mode="wb", dir=tmp_dir, suffix=file_ext
-        ) as f:
+        with (
+            TemporaryDirectory(prefix="tmps32hive_") as tmp_dir,
+            NamedTemporaryFile(mode="wb", dir=tmp_dir, suffix=file_ext) as f,
+        ):
             self.log.info("Dumping S3 key %s contents to local file %s", s3_key_object.key, f.name)
             if self.select_expression:
                 option = {}
diff --git a/airflow/providers/apache/hive/transfers/vertica_to_hive.py b/providers/src/airflow/providers/apache/hive/transfers/vertica_to_hive.py
similarity index 100%
rename from airflow/providers/apache/hive/transfers/vertica_to_hive.py
rename to providers/src/airflow/providers/apache/hive/transfers/vertica_to_hive.py
diff --git a/airflow/providers/apache/iceberg/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/iceberg/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/apache/iceberg/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/apache/iceberg/.latest-doc-only-change.txt
diff --git a/airflow/providers/apache/iceberg/CHANGELOG.rst b/providers/src/airflow/providers/apache/iceberg/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/apache/iceberg/CHANGELOG.rst
rename to providers/src/airflow/providers/apache/iceberg/CHANGELOG.rst
diff --git a/airflow/providers/apache/iceberg/__init__.py b/providers/src/airflow/providers/apache/iceberg/__init__.py
similarity index 100%
rename from airflow/providers/apache/iceberg/__init__.py
rename to providers/src/airflow/providers/apache/iceberg/__init__.py
diff --git a/airflow/providers/apache/iceberg/hooks/__init__.py b/providers/src/airflow/providers/apache/iceberg/hooks/__init__.py
similarity index 100%
rename from airflow/providers/apache/iceberg/hooks/__init__.py
rename to providers/src/airflow/providers/apache/iceberg/hooks/__init__.py
diff --git a/airflow/providers/apache/iceberg/hooks/iceberg.py b/providers/src/airflow/providers/apache/iceberg/hooks/iceberg.py
similarity index 100%
rename from airflow/providers/apache/iceberg/hooks/iceberg.py
rename to providers/src/airflow/providers/apache/iceberg/hooks/iceberg.py
diff --git a/airflow/providers/apache/iceberg/provider.yaml b/providers/src/airflow/providers/apache/iceberg/provider.yaml
similarity index 100%
rename from airflow/providers/apache/iceberg/provider.yaml
rename to providers/src/airflow/providers/apache/iceberg/provider.yaml
diff --git a/airflow/providers/apache/impala/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/impala/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/apache/impala/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/apache/impala/.latest-doc-only-change.txt
diff --git a/airflow/providers/apache/impala/CHANGELOG.rst b/providers/src/airflow/providers/apache/impala/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/apache/impala/CHANGELOG.rst
rename to providers/src/airflow/providers/apache/impala/CHANGELOG.rst
diff --git a/airflow/providers/apache/impala/__init__.py b/providers/src/airflow/providers/apache/impala/__init__.py
similarity index 100%
rename from airflow/providers/apache/impala/__init__.py
rename to providers/src/airflow/providers/apache/impala/__init__.py
diff --git a/airflow/providers/apache/impala/hooks/__init__.py b/providers/src/airflow/providers/apache/impala/hooks/__init__.py
similarity index 100%
rename from airflow/providers/apache/impala/hooks/__init__.py
rename to providers/src/airflow/providers/apache/impala/hooks/__init__.py
diff --git a/airflow/providers/apache/impala/hooks/impala.py b/providers/src/airflow/providers/apache/impala/hooks/impala.py
similarity index 100%
rename from airflow/providers/apache/impala/hooks/impala.py
rename to providers/src/airflow/providers/apache/impala/hooks/impala.py
diff --git a/airflow/providers/apache/impala/provider.yaml b/providers/src/airflow/providers/apache/impala/provider.yaml
similarity index 100%
rename from airflow/providers/apache/impala/provider.yaml
rename to providers/src/airflow/providers/apache/impala/provider.yaml
diff --git a/airflow/providers/apache/kafka/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/kafka/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/apache/kafka/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/apache/kafka/.latest-doc-only-change.txt
diff --git a/airflow/providers/apache/kafka/CHANGELOG.rst b/providers/src/airflow/providers/apache/kafka/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/apache/kafka/CHANGELOG.rst
rename to providers/src/airflow/providers/apache/kafka/CHANGELOG.rst
diff --git a/airflow/providers/apache/kafka/__init__.py b/providers/src/airflow/providers/apache/kafka/__init__.py
similarity index 100%
rename from airflow/providers/apache/kafka/__init__.py
rename to providers/src/airflow/providers/apache/kafka/__init__.py
diff --git a/airflow/providers/apache/kafka/sensors/__init__.py b/providers/src/airflow/providers/apache/kafka/hooks/__init__.py
similarity index 100%
rename from airflow/providers/apache/kafka/sensors/__init__.py
rename to providers/src/airflow/providers/apache/kafka/hooks/__init__.py
diff --git a/airflow/providers/apache/kafka/hooks/base.py b/providers/src/airflow/providers/apache/kafka/hooks/base.py
similarity index 100%
rename from airflow/providers/apache/kafka/hooks/base.py
rename to providers/src/airflow/providers/apache/kafka/hooks/base.py
diff --git a/airflow/providers/apache/kafka/hooks/client.py b/providers/src/airflow/providers/apache/kafka/hooks/client.py
similarity index 100%
rename from airflow/providers/apache/kafka/hooks/client.py
rename to providers/src/airflow/providers/apache/kafka/hooks/client.py
diff --git a/airflow/providers/apache/kafka/hooks/consume.py b/providers/src/airflow/providers/apache/kafka/hooks/consume.py
similarity index 100%
rename from airflow/providers/apache/kafka/hooks/consume.py
rename to providers/src/airflow/providers/apache/kafka/hooks/consume.py
diff --git a/airflow/providers/apache/kafka/hooks/produce.py b/providers/src/airflow/providers/apache/kafka/hooks/produce.py
similarity index 100%
rename from airflow/providers/apache/kafka/hooks/produce.py
rename to providers/src/airflow/providers/apache/kafka/hooks/produce.py
diff --git a/airflow/providers/apache/kafka/triggers/__init__.py b/providers/src/airflow/providers/apache/kafka/operators/__init__.py
similarity index 100%
rename from airflow/providers/apache/kafka/triggers/__init__.py
rename to providers/src/airflow/providers/apache/kafka/operators/__init__.py
diff --git a/airflow/providers/apache/kafka/operators/consume.py b/providers/src/airflow/providers/apache/kafka/operators/consume.py
similarity index 100%
rename from airflow/providers/apache/kafka/operators/consume.py
rename to providers/src/airflow/providers/apache/kafka/operators/consume.py
diff --git a/airflow/providers/apache/kafka/operators/produce.py b/providers/src/airflow/providers/apache/kafka/operators/produce.py
similarity index 100%
rename from airflow/providers/apache/kafka/operators/produce.py
rename to providers/src/airflow/providers/apache/kafka/operators/produce.py
diff --git a/airflow/providers/apache/kafka/provider.yaml b/providers/src/airflow/providers/apache/kafka/provider.yaml
similarity index 100%
rename from airflow/providers/apache/kafka/provider.yaml
rename to providers/src/airflow/providers/apache/kafka/provider.yaml
diff --git a/airflow/providers/apache/kylin/hooks/__init__.py b/providers/src/airflow/providers/apache/kafka/sensors/__init__.py
similarity index 100%
rename from airflow/providers/apache/kylin/hooks/__init__.py
rename to providers/src/airflow/providers/apache/kafka/sensors/__init__.py
diff --git a/airflow/providers/apache/kafka/sensors/kafka.py b/providers/src/airflow/providers/apache/kafka/sensors/kafka.py
similarity index 100%
rename from airflow/providers/apache/kafka/sensors/kafka.py
rename to providers/src/airflow/providers/apache/kafka/sensors/kafka.py
diff --git a/airflow/providers/apache/kylin/operators/__init__.py b/providers/src/airflow/providers/apache/kafka/triggers/__init__.py
similarity index 100%
rename from airflow/providers/apache/kylin/operators/__init__.py
rename to providers/src/airflow/providers/apache/kafka/triggers/__init__.py
diff --git a/airflow/providers/apache/kafka/triggers/await_message.py b/providers/src/airflow/providers/apache/kafka/triggers/await_message.py
similarity index 100%
rename from airflow/providers/apache/kafka/triggers/await_message.py
rename to providers/src/airflow/providers/apache/kafka/triggers/await_message.py
diff --git a/airflow/providers/apache/kylin/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/kylin/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/apache/kylin/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/apache/kylin/.latest-doc-only-change.txt
diff --git a/airflow/providers/apache/kylin/CHANGELOG.rst b/providers/src/airflow/providers/apache/kylin/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/apache/kylin/CHANGELOG.rst
rename to providers/src/airflow/providers/apache/kylin/CHANGELOG.rst
diff --git a/airflow/providers/apache/kylin/__init__.py b/providers/src/airflow/providers/apache/kylin/__init__.py
similarity index 100%
rename from airflow/providers/apache/kylin/__init__.py
rename to providers/src/airflow/providers/apache/kylin/__init__.py
diff --git a/airflow/providers/apache/livy/hooks/__init__.py b/providers/src/airflow/providers/apache/kylin/hooks/__init__.py
similarity index 100%
rename from airflow/providers/apache/livy/hooks/__init__.py
rename to providers/src/airflow/providers/apache/kylin/hooks/__init__.py
diff --git a/airflow/providers/apache/kylin/hooks/kylin.py b/providers/src/airflow/providers/apache/kylin/hooks/kylin.py
similarity index 100%
rename from airflow/providers/apache/kylin/hooks/kylin.py
rename to providers/src/airflow/providers/apache/kylin/hooks/kylin.py
diff --git a/airflow/providers/apache/livy/operators/__init__.py b/providers/src/airflow/providers/apache/kylin/operators/__init__.py
similarity index 100%
rename from airflow/providers/apache/livy/operators/__init__.py
rename to providers/src/airflow/providers/apache/kylin/operators/__init__.py
diff --git a/airflow/providers/apache/kylin/operators/kylin_cube.py b/providers/src/airflow/providers/apache/kylin/operators/kylin_cube.py
similarity index 100%
rename from airflow/providers/apache/kylin/operators/kylin_cube.py
rename to providers/src/airflow/providers/apache/kylin/operators/kylin_cube.py
diff --git a/airflow/providers/apache/kylin/provider.yaml b/providers/src/airflow/providers/apache/kylin/provider.yaml
similarity index 100%
rename from airflow/providers/apache/kylin/provider.yaml
rename to providers/src/airflow/providers/apache/kylin/provider.yaml
diff --git a/airflow/providers/apache/livy/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/livy/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/apache/livy/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/apache/livy/.latest-doc-only-change.txt
diff --git a/airflow/providers/apache/livy/CHANGELOG.rst b/providers/src/airflow/providers/apache/livy/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/apache/livy/CHANGELOG.rst
rename to providers/src/airflow/providers/apache/livy/CHANGELOG.rst
diff --git a/airflow/providers/apache/livy/__init__.py b/providers/src/airflow/providers/apache/livy/__init__.py
similarity index 100%
rename from airflow/providers/apache/livy/__init__.py
rename to providers/src/airflow/providers/apache/livy/__init__.py
diff --git a/airflow/providers/apache/livy/sensors/__init__.py b/providers/src/airflow/providers/apache/livy/hooks/__init__.py
similarity index 100%
rename from airflow/providers/apache/livy/sensors/__init__.py
rename to providers/src/airflow/providers/apache/livy/hooks/__init__.py
diff --git a/airflow/providers/apache/livy/hooks/livy.py b/providers/src/airflow/providers/apache/livy/hooks/livy.py
similarity index 100%
rename from airflow/providers/apache/livy/hooks/livy.py
rename to providers/src/airflow/providers/apache/livy/hooks/livy.py
diff --git a/airflow/providers/apache/livy/triggers/__init__.py b/providers/src/airflow/providers/apache/livy/operators/__init__.py
similarity index 100%
rename from airflow/providers/apache/livy/triggers/__init__.py
rename to providers/src/airflow/providers/apache/livy/operators/__init__.py
diff --git a/airflow/providers/apache/livy/operators/livy.py b/providers/src/airflow/providers/apache/livy/operators/livy.py
similarity index 100%
rename from airflow/providers/apache/livy/operators/livy.py
rename to providers/src/airflow/providers/apache/livy/operators/livy.py
diff --git a/airflow/providers/apache/livy/provider.yaml b/providers/src/airflow/providers/apache/livy/provider.yaml
similarity index 100%
rename from airflow/providers/apache/livy/provider.yaml
rename to providers/src/airflow/providers/apache/livy/provider.yaml
diff --git a/airflow/providers/arangodb/example_dags/__init__.py b/providers/src/airflow/providers/apache/livy/sensors/__init__.py
similarity index 100%
rename from airflow/providers/arangodb/example_dags/__init__.py
rename to providers/src/airflow/providers/apache/livy/sensors/__init__.py
diff --git a/airflow/providers/apache/livy/sensors/livy.py b/providers/src/airflow/providers/apache/livy/sensors/livy.py
similarity index 100%
rename from airflow/providers/apache/livy/sensors/livy.py
rename to providers/src/airflow/providers/apache/livy/sensors/livy.py
diff --git a/airflow/providers/arangodb/hooks/__init__.py b/providers/src/airflow/providers/apache/livy/triggers/__init__.py
similarity index 100%
rename from airflow/providers/arangodb/hooks/__init__.py
rename to providers/src/airflow/providers/apache/livy/triggers/__init__.py
diff --git a/airflow/providers/apache/livy/triggers/livy.py b/providers/src/airflow/providers/apache/livy/triggers/livy.py
similarity index 100%
rename from airflow/providers/apache/livy/triggers/livy.py
rename to providers/src/airflow/providers/apache/livy/triggers/livy.py
diff --git a/airflow/providers/apache/pig/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/pig/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/apache/pig/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/apache/pig/.latest-doc-only-change.txt
diff --git a/airflow/providers/apache/pig/CHANGELOG.rst b/providers/src/airflow/providers/apache/pig/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/apache/pig/CHANGELOG.rst
rename to providers/src/airflow/providers/apache/pig/CHANGELOG.rst
diff --git a/airflow/providers/apache/pig/__init__.py b/providers/src/airflow/providers/apache/pig/__init__.py
similarity index 100%
rename from airflow/providers/apache/pig/__init__.py
rename to providers/src/airflow/providers/apache/pig/__init__.py
diff --git a/airflow/providers/apache/pig/hooks/__init__.py b/providers/src/airflow/providers/apache/pig/hooks/__init__.py
similarity index 100%
rename from airflow/providers/apache/pig/hooks/__init__.py
rename to providers/src/airflow/providers/apache/pig/hooks/__init__.py
diff --git a/airflow/providers/apache/pig/hooks/pig.py b/providers/src/airflow/providers/apache/pig/hooks/pig.py
similarity index 100%
rename from airflow/providers/apache/pig/hooks/pig.py
rename to providers/src/airflow/providers/apache/pig/hooks/pig.py
diff --git a/airflow/providers/apache/pig/operators/__init__.py b/providers/src/airflow/providers/apache/pig/operators/__init__.py
similarity index 100%
rename from airflow/providers/apache/pig/operators/__init__.py
rename to providers/src/airflow/providers/apache/pig/operators/__init__.py
diff --git a/airflow/providers/apache/pig/operators/pig.py b/providers/src/airflow/providers/apache/pig/operators/pig.py
similarity index 100%
rename from airflow/providers/apache/pig/operators/pig.py
rename to providers/src/airflow/providers/apache/pig/operators/pig.py
diff --git a/airflow/providers/apache/pig/provider.yaml b/providers/src/airflow/providers/apache/pig/provider.yaml
similarity index 100%
rename from airflow/providers/apache/pig/provider.yaml
rename to providers/src/airflow/providers/apache/pig/provider.yaml
diff --git a/airflow/providers/apache/pinot/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/pinot/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/apache/pinot/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/apache/pinot/.latest-doc-only-change.txt
diff --git a/airflow/providers/apache/pinot/CHANGELOG.rst b/providers/src/airflow/providers/apache/pinot/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/apache/pinot/CHANGELOG.rst
rename to providers/src/airflow/providers/apache/pinot/CHANGELOG.rst
diff --git a/airflow/providers/apache/pinot/__init__.py b/providers/src/airflow/providers/apache/pinot/__init__.py
similarity index 100%
rename from airflow/providers/apache/pinot/__init__.py
rename to providers/src/airflow/providers/apache/pinot/__init__.py
diff --git a/airflow/providers/apache/pinot/hooks/__init__.py b/providers/src/airflow/providers/apache/pinot/hooks/__init__.py
similarity index 100%
rename from airflow/providers/apache/pinot/hooks/__init__.py
rename to providers/src/airflow/providers/apache/pinot/hooks/__init__.py
diff --git a/airflow/providers/apache/pinot/hooks/pinot.py b/providers/src/airflow/providers/apache/pinot/hooks/pinot.py
similarity index 100%
rename from airflow/providers/apache/pinot/hooks/pinot.py
rename to providers/src/airflow/providers/apache/pinot/hooks/pinot.py
diff --git a/airflow/providers/apache/pinot/provider.yaml b/providers/src/airflow/providers/apache/pinot/provider.yaml
similarity index 100%
rename from airflow/providers/apache/pinot/provider.yaml
rename to providers/src/airflow/providers/apache/pinot/provider.yaml
diff --git a/airflow/providers/apache/spark/.latest-doc-only-change.txt b/providers/src/airflow/providers/apache/spark/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/apache/spark/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/apache/spark/.latest-doc-only-change.txt
diff --git a/airflow/providers/apache/spark/CHANGELOG.rst b/providers/src/airflow/providers/apache/spark/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/apache/spark/CHANGELOG.rst
rename to providers/src/airflow/providers/apache/spark/CHANGELOG.rst
diff --git a/airflow/providers/apache/spark/__init__.py b/providers/src/airflow/providers/apache/spark/__init__.py
similarity index 100%
rename from airflow/providers/apache/spark/__init__.py
rename to providers/src/airflow/providers/apache/spark/__init__.py
diff --git a/airflow/providers/apache/spark/decorators/__init__.py b/providers/src/airflow/providers/apache/spark/decorators/__init__.py
similarity index 100%
rename from airflow/providers/apache/spark/decorators/__init__.py
rename to providers/src/airflow/providers/apache/spark/decorators/__init__.py
diff --git a/airflow/providers/apache/spark/decorators/pyspark.py b/providers/src/airflow/providers/apache/spark/decorators/pyspark.py
similarity index 100%
rename from airflow/providers/apache/spark/decorators/pyspark.py
rename to providers/src/airflow/providers/apache/spark/decorators/pyspark.py
diff --git a/airflow/providers/apache/spark/hooks/__init__.py b/providers/src/airflow/providers/apache/spark/hooks/__init__.py
similarity index 100%
rename from airflow/providers/apache/spark/hooks/__init__.py
rename to providers/src/airflow/providers/apache/spark/hooks/__init__.py
diff --git a/airflow/providers/apache/spark/hooks/spark_connect.py b/providers/src/airflow/providers/apache/spark/hooks/spark_connect.py
similarity index 100%
rename from airflow/providers/apache/spark/hooks/spark_connect.py
rename to providers/src/airflow/providers/apache/spark/hooks/spark_connect.py
diff --git a/airflow/providers/apache/spark/hooks/spark_jdbc.py b/providers/src/airflow/providers/apache/spark/hooks/spark_jdbc.py
similarity index 100%
rename from airflow/providers/apache/spark/hooks/spark_jdbc.py
rename to providers/src/airflow/providers/apache/spark/hooks/spark_jdbc.py
diff --git a/airflow/providers/apache/spark/hooks/spark_jdbc_script.py b/providers/src/airflow/providers/apache/spark/hooks/spark_jdbc_script.py
similarity index 100%
rename from airflow/providers/apache/spark/hooks/spark_jdbc_script.py
rename to providers/src/airflow/providers/apache/spark/hooks/spark_jdbc_script.py
diff --git a/airflow/providers/apache/spark/hooks/spark_sql.py b/providers/src/airflow/providers/apache/spark/hooks/spark_sql.py
similarity index 100%
rename from airflow/providers/apache/spark/hooks/spark_sql.py
rename to providers/src/airflow/providers/apache/spark/hooks/spark_sql.py
diff --git a/airflow/providers/apache/spark/hooks/spark_submit.py b/providers/src/airflow/providers/apache/spark/hooks/spark_submit.py
similarity index 100%
rename from airflow/providers/apache/spark/hooks/spark_submit.py
rename to providers/src/airflow/providers/apache/spark/hooks/spark_submit.py
diff --git a/airflow/providers/apache/spark/operators/__init__.py b/providers/src/airflow/providers/apache/spark/operators/__init__.py
similarity index 100%
rename from airflow/providers/apache/spark/operators/__init__.py
rename to providers/src/airflow/providers/apache/spark/operators/__init__.py
diff --git a/airflow/providers/apache/spark/operators/spark_jdbc.py b/providers/src/airflow/providers/apache/spark/operators/spark_jdbc.py
similarity index 100%
rename from airflow/providers/apache/spark/operators/spark_jdbc.py
rename to providers/src/airflow/providers/apache/spark/operators/spark_jdbc.py
diff --git a/airflow/providers/apache/spark/operators/spark_sql.py b/providers/src/airflow/providers/apache/spark/operators/spark_sql.py
similarity index 100%
rename from airflow/providers/apache/spark/operators/spark_sql.py
rename to providers/src/airflow/providers/apache/spark/operators/spark_sql.py
diff --git a/airflow/providers/apache/spark/operators/spark_submit.py b/providers/src/airflow/providers/apache/spark/operators/spark_submit.py
similarity index 100%
rename from airflow/providers/apache/spark/operators/spark_submit.py
rename to providers/src/airflow/providers/apache/spark/operators/spark_submit.py
diff --git a/airflow/providers/apache/spark/provider.yaml b/providers/src/airflow/providers/apache/spark/provider.yaml
similarity index 100%
rename from airflow/providers/apache/spark/provider.yaml
rename to providers/src/airflow/providers/apache/spark/provider.yaml
diff --git a/airflow/providers/apprise/.latest-doc-only-change.txt b/providers/src/airflow/providers/apprise/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/apprise/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/apprise/.latest-doc-only-change.txt
diff --git a/airflow/providers/apprise/CHANGELOG.rst b/providers/src/airflow/providers/apprise/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/apprise/CHANGELOG.rst
rename to providers/src/airflow/providers/apprise/CHANGELOG.rst
diff --git a/airflow/providers/apprise/__init__.py b/providers/src/airflow/providers/apprise/__init__.py
similarity index 100%
rename from airflow/providers/apprise/__init__.py
rename to providers/src/airflow/providers/apprise/__init__.py
diff --git a/airflow/providers/apprise/hooks/__init__.py b/providers/src/airflow/providers/apprise/hooks/__init__.py
similarity index 100%
rename from airflow/providers/apprise/hooks/__init__.py
rename to providers/src/airflow/providers/apprise/hooks/__init__.py
diff --git a/airflow/providers/apprise/hooks/apprise.py b/providers/src/airflow/providers/apprise/hooks/apprise.py
similarity index 100%
rename from airflow/providers/apprise/hooks/apprise.py
rename to providers/src/airflow/providers/apprise/hooks/apprise.py
diff --git a/airflow/providers/apprise/notifications/__init__.py b/providers/src/airflow/providers/apprise/notifications/__init__.py
similarity index 100%
rename from airflow/providers/apprise/notifications/__init__.py
rename to providers/src/airflow/providers/apprise/notifications/__init__.py
diff --git a/airflow/providers/apprise/notifications/apprise.py b/providers/src/airflow/providers/apprise/notifications/apprise.py
similarity index 100%
rename from airflow/providers/apprise/notifications/apprise.py
rename to providers/src/airflow/providers/apprise/notifications/apprise.py
diff --git a/airflow/providers/apprise/provider.yaml b/providers/src/airflow/providers/apprise/provider.yaml
similarity index 100%
rename from airflow/providers/apprise/provider.yaml
rename to providers/src/airflow/providers/apprise/provider.yaml
diff --git a/airflow/providers/arangodb/.latest-doc-only-change.txt b/providers/src/airflow/providers/arangodb/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/arangodb/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/arangodb/.latest-doc-only-change.txt
diff --git a/airflow/providers/arangodb/CHANGELOG.rst b/providers/src/airflow/providers/arangodb/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/arangodb/CHANGELOG.rst
rename to providers/src/airflow/providers/arangodb/CHANGELOG.rst
diff --git a/airflow/providers/arangodb/__init__.py b/providers/src/airflow/providers/arangodb/__init__.py
similarity index 100%
rename from airflow/providers/arangodb/__init__.py
rename to providers/src/airflow/providers/arangodb/__init__.py
diff --git a/airflow/providers/arangodb/operators/__init__.py b/providers/src/airflow/providers/arangodb/example_dags/__init__.py
similarity index 100%
rename from airflow/providers/arangodb/operators/__init__.py
rename to providers/src/airflow/providers/arangodb/example_dags/__init__.py
diff --git a/airflow/providers/arangodb/example_dags/example_arangodb.py b/providers/src/airflow/providers/arangodb/example_dags/example_arangodb.py
similarity index 100%
rename from airflow/providers/arangodb/example_dags/example_arangodb.py
rename to providers/src/airflow/providers/arangodb/example_dags/example_arangodb.py
diff --git a/airflow/providers/arangodb/sensors/__init__.py b/providers/src/airflow/providers/arangodb/hooks/__init__.py
similarity index 100%
rename from airflow/providers/arangodb/sensors/__init__.py
rename to providers/src/airflow/providers/arangodb/hooks/__init__.py
diff --git a/airflow/providers/arangodb/hooks/arangodb.py b/providers/src/airflow/providers/arangodb/hooks/arangodb.py
similarity index 100%
rename from airflow/providers/arangodb/hooks/arangodb.py
rename to providers/src/airflow/providers/arangodb/hooks/arangodb.py
diff --git a/airflow/providers/atlassian/__init__.py b/providers/src/airflow/providers/arangodb/operators/__init__.py
similarity index 100%
rename from airflow/providers/atlassian/__init__.py
rename to providers/src/airflow/providers/arangodb/operators/__init__.py
diff --git a/airflow/providers/arangodb/operators/arangodb.py b/providers/src/airflow/providers/arangodb/operators/arangodb.py
similarity index 100%
rename from airflow/providers/arangodb/operators/arangodb.py
rename to providers/src/airflow/providers/arangodb/operators/arangodb.py
diff --git a/airflow/providers/arangodb/provider.yaml b/providers/src/airflow/providers/arangodb/provider.yaml
similarity index 100%
rename from airflow/providers/arangodb/provider.yaml
rename to providers/src/airflow/providers/arangodb/provider.yaml
diff --git a/airflow/providers/atlassian/jira/hooks/__init__.py b/providers/src/airflow/providers/arangodb/sensors/__init__.py
similarity index 100%
rename from airflow/providers/atlassian/jira/hooks/__init__.py
rename to providers/src/airflow/providers/arangodb/sensors/__init__.py
diff --git a/airflow/providers/arangodb/sensors/arangodb.py b/providers/src/airflow/providers/arangodb/sensors/arangodb.py
similarity index 100%
rename from airflow/providers/arangodb/sensors/arangodb.py
rename to providers/src/airflow/providers/arangodb/sensors/arangodb.py
diff --git a/airflow/providers/asana/.latest-doc-only-change.txt b/providers/src/airflow/providers/asana/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/asana/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/asana/.latest-doc-only-change.txt
diff --git a/airflow/providers/asana/CHANGELOG.rst b/providers/src/airflow/providers/asana/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/asana/CHANGELOG.rst
rename to providers/src/airflow/providers/asana/CHANGELOG.rst
diff --git a/airflow/providers/asana/README.md b/providers/src/airflow/providers/asana/README.md
similarity index 100%
rename from airflow/providers/asana/README.md
rename to providers/src/airflow/providers/asana/README.md
diff --git a/airflow/providers/asana/__init__.py b/providers/src/airflow/providers/asana/__init__.py
similarity index 100%
rename from airflow/providers/asana/__init__.py
rename to providers/src/airflow/providers/asana/__init__.py
diff --git a/airflow/providers/asana/hooks/__init__.py b/providers/src/airflow/providers/asana/hooks/__init__.py
similarity index 100%
rename from airflow/providers/asana/hooks/__init__.py
rename to providers/src/airflow/providers/asana/hooks/__init__.py
diff --git a/airflow/providers/asana/hooks/asana.py b/providers/src/airflow/providers/asana/hooks/asana.py
similarity index 100%
rename from airflow/providers/asana/hooks/asana.py
rename to providers/src/airflow/providers/asana/hooks/asana.py
diff --git a/airflow/providers/asana/operators/__init__.py b/providers/src/airflow/providers/asana/operators/__init__.py
similarity index 100%
rename from airflow/providers/asana/operators/__init__.py
rename to providers/src/airflow/providers/asana/operators/__init__.py
diff --git a/airflow/providers/asana/operators/asana_tasks.py b/providers/src/airflow/providers/asana/operators/asana_tasks.py
similarity index 100%
rename from airflow/providers/asana/operators/asana_tasks.py
rename to providers/src/airflow/providers/asana/operators/asana_tasks.py
diff --git a/airflow/providers/asana/provider.yaml b/providers/src/airflow/providers/asana/provider.yaml
similarity index 100%
rename from airflow/providers/asana/provider.yaml
rename to providers/src/airflow/providers/asana/provider.yaml
diff --git a/airflow/providers/atlassian/jira/.latest-doc-only-change.txt b/providers/src/airflow/providers/atlassian/jira/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/atlassian/jira/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/atlassian/jira/.latest-doc-only-change.txt
diff --git a/airflow/providers/atlassian/jira/CHANGELOG.rst b/providers/src/airflow/providers/atlassian/jira/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/atlassian/jira/CHANGELOG.rst
rename to providers/src/airflow/providers/atlassian/jira/CHANGELOG.rst
diff --git a/airflow/providers/atlassian/jira/__init__.py b/providers/src/airflow/providers/atlassian/jira/__init__.py
similarity index 100%
rename from airflow/providers/atlassian/jira/__init__.py
rename to providers/src/airflow/providers/atlassian/jira/__init__.py
diff --git a/airflow/providers/atlassian/jira/notifications/__init__.py b/providers/src/airflow/providers/atlassian/jira/hooks/__init__.py
similarity index 100%
rename from airflow/providers/atlassian/jira/notifications/__init__.py
rename to providers/src/airflow/providers/atlassian/jira/hooks/__init__.py
diff --git a/airflow/providers/atlassian/jira/hooks/jira.py b/providers/src/airflow/providers/atlassian/jira/hooks/jira.py
similarity index 100%
rename from airflow/providers/atlassian/jira/hooks/jira.py
rename to providers/src/airflow/providers/atlassian/jira/hooks/jira.py
diff --git a/airflow/providers/atlassian/jira/operators/__init__.py b/providers/src/airflow/providers/atlassian/jira/notifications/__init__.py
similarity index 100%
rename from airflow/providers/atlassian/jira/operators/__init__.py
rename to providers/src/airflow/providers/atlassian/jira/notifications/__init__.py
diff --git a/airflow/providers/atlassian/jira/notifications/jira.py b/providers/src/airflow/providers/atlassian/jira/notifications/jira.py
similarity index 100%
rename from airflow/providers/atlassian/jira/notifications/jira.py
rename to providers/src/airflow/providers/atlassian/jira/notifications/jira.py
diff --git a/airflow/providers/atlassian/jira/sensors/__init__.py b/providers/src/airflow/providers/atlassian/jira/operators/__init__.py
similarity index 100%
rename from airflow/providers/atlassian/jira/sensors/__init__.py
rename to providers/src/airflow/providers/atlassian/jira/operators/__init__.py
diff --git a/airflow/providers/atlassian/jira/operators/jira.py b/providers/src/airflow/providers/atlassian/jira/operators/jira.py
similarity index 100%
rename from airflow/providers/atlassian/jira/operators/jira.py
rename to providers/src/airflow/providers/atlassian/jira/operators/jira.py
diff --git a/airflow/providers/atlassian/jira/provider.yaml b/providers/src/airflow/providers/atlassian/jira/provider.yaml
similarity index 100%
rename from airflow/providers/atlassian/jira/provider.yaml
rename to providers/src/airflow/providers/atlassian/jira/provider.yaml
diff --git a/airflow/providers/celery/cli/__init__.py b/providers/src/airflow/providers/atlassian/jira/sensors/__init__.py
similarity index 100%
rename from airflow/providers/celery/cli/__init__.py
rename to providers/src/airflow/providers/atlassian/jira/sensors/__init__.py
diff --git a/airflow/providers/atlassian/jira/sensors/jira.py b/providers/src/airflow/providers/atlassian/jira/sensors/jira.py
similarity index 100%
rename from airflow/providers/atlassian/jira/sensors/jira.py
rename to providers/src/airflow/providers/atlassian/jira/sensors/jira.py
diff --git a/airflow/providers/celery/.latest-doc-only-change.txt b/providers/src/airflow/providers/celery/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/celery/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/celery/.latest-doc-only-change.txt
diff --git a/airflow/providers/celery/CHANGELOG.rst b/providers/src/airflow/providers/celery/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/celery/CHANGELOG.rst
rename to providers/src/airflow/providers/celery/CHANGELOG.rst
diff --git a/airflow/providers/celery/__init__.py b/providers/src/airflow/providers/celery/__init__.py
similarity index 100%
rename from airflow/providers/celery/__init__.py
rename to providers/src/airflow/providers/celery/__init__.py
diff --git a/airflow/providers/cncf/kubernetes/backcompat/__init__.py b/providers/src/airflow/providers/celery/cli/__init__.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/backcompat/__init__.py
rename to providers/src/airflow/providers/celery/cli/__init__.py
diff --git a/airflow/providers/celery/cli/celery_command.py b/providers/src/airflow/providers/celery/cli/celery_command.py
similarity index 100%
rename from airflow/providers/celery/cli/celery_command.py
rename to providers/src/airflow/providers/celery/cli/celery_command.py
diff --git a/airflow/providers/celery/executors/__init__.py b/providers/src/airflow/providers/celery/executors/__init__.py
similarity index 100%
rename from airflow/providers/celery/executors/__init__.py
rename to providers/src/airflow/providers/celery/executors/__init__.py
diff --git a/airflow/providers/celery/executors/celery_executor.py b/providers/src/airflow/providers/celery/executors/celery_executor.py
similarity index 100%
rename from airflow/providers/celery/executors/celery_executor.py
rename to providers/src/airflow/providers/celery/executors/celery_executor.py
diff --git a/airflow/providers/celery/executors/celery_executor_utils.py b/providers/src/airflow/providers/celery/executors/celery_executor_utils.py
similarity index 100%
rename from airflow/providers/celery/executors/celery_executor_utils.py
rename to providers/src/airflow/providers/celery/executors/celery_executor_utils.py
diff --git a/airflow/providers/celery/executors/celery_kubernetes_executor.py b/providers/src/airflow/providers/celery/executors/celery_kubernetes_executor.py
similarity index 100%
rename from airflow/providers/celery/executors/celery_kubernetes_executor.py
rename to providers/src/airflow/providers/celery/executors/celery_kubernetes_executor.py
diff --git a/airflow/providers/celery/executors/default_celery.py b/providers/src/airflow/providers/celery/executors/default_celery.py
similarity index 100%
rename from airflow/providers/celery/executors/default_celery.py
rename to providers/src/airflow/providers/celery/executors/default_celery.py
diff --git a/airflow/providers/celery/provider.yaml b/providers/src/airflow/providers/celery/provider.yaml
similarity index 100%
rename from airflow/providers/celery/provider.yaml
rename to providers/src/airflow/providers/celery/provider.yaml
diff --git a/airflow/providers/celery/sensors/__init__.py b/providers/src/airflow/providers/celery/sensors/__init__.py
similarity index 100%
rename from airflow/providers/celery/sensors/__init__.py
rename to providers/src/airflow/providers/celery/sensors/__init__.py
diff --git a/airflow/providers/celery/sensors/celery_queue.py b/providers/src/airflow/providers/celery/sensors/celery_queue.py
similarity index 100%
rename from airflow/providers/celery/sensors/celery_queue.py
rename to providers/src/airflow/providers/celery/sensors/celery_queue.py
diff --git a/airflow/providers/cloudant/.latest-doc-only-change.txt b/providers/src/airflow/providers/cloudant/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/cloudant/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/cloudant/.latest-doc-only-change.txt
diff --git a/airflow/providers/cloudant/CHANGELOG.rst b/providers/src/airflow/providers/cloudant/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/cloudant/CHANGELOG.rst
rename to providers/src/airflow/providers/cloudant/CHANGELOG.rst
diff --git a/airflow/providers/cloudant/__init__.py b/providers/src/airflow/providers/cloudant/__init__.py
similarity index 100%
rename from airflow/providers/cloudant/__init__.py
rename to providers/src/airflow/providers/cloudant/__init__.py
diff --git a/airflow/providers/cloudant/cloudant_fake.py b/providers/src/airflow/providers/cloudant/cloudant_fake.py
similarity index 100%
rename from airflow/providers/cloudant/cloudant_fake.py
rename to providers/src/airflow/providers/cloudant/cloudant_fake.py
diff --git a/airflow/providers/cloudant/hooks/__init__.py b/providers/src/airflow/providers/cloudant/hooks/__init__.py
similarity index 100%
rename from airflow/providers/cloudant/hooks/__init__.py
rename to providers/src/airflow/providers/cloudant/hooks/__init__.py
diff --git a/airflow/providers/cloudant/hooks/cloudant.py b/providers/src/airflow/providers/cloudant/hooks/cloudant.py
similarity index 100%
rename from airflow/providers/cloudant/hooks/cloudant.py
rename to providers/src/airflow/providers/cloudant/hooks/cloudant.py
diff --git a/airflow/providers/cloudant/provider.yaml b/providers/src/airflow/providers/cloudant/provider.yaml
similarity index 100%
rename from airflow/providers/cloudant/provider.yaml
rename to providers/src/airflow/providers/cloudant/provider.yaml
diff --git a/airflow/providers/cncf/kubernetes/.latest-doc-only-change.txt b/providers/src/airflow/providers/cncf/kubernetes/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/cncf/kubernetes/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/cncf/kubernetes/.latest-doc-only-change.txt
diff --git a/airflow/providers/cncf/kubernetes/CHANGELOG.rst b/providers/src/airflow/providers/cncf/kubernetes/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/cncf/kubernetes/CHANGELOG.rst
rename to providers/src/airflow/providers/cncf/kubernetes/CHANGELOG.rst
diff --git a/airflow/providers/cncf/kubernetes/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/__init__.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/__init__.py
rename to providers/src/airflow/providers/cncf/kubernetes/__init__.py
diff --git a/airflow/providers/cncf/kubernetes/cli/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/backcompat/__init__.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/cli/__init__.py
rename to providers/src/airflow/providers/cncf/kubernetes/backcompat/__init__.py
diff --git a/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py b/providers/src/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py
rename to providers/src/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py
diff --git a/airflow/providers/cncf/kubernetes/callbacks.py b/providers/src/airflow/providers/cncf/kubernetes/callbacks.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/callbacks.py
rename to providers/src/airflow/providers/cncf/kubernetes/callbacks.py
diff --git a/airflow/providers/cncf/kubernetes/hooks/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/cli/__init__.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/hooks/__init__.py
rename to providers/src/airflow/providers/cncf/kubernetes/cli/__init__.py
diff --git a/airflow/providers/cncf/kubernetes/cli/kubernetes_command.py b/providers/src/airflow/providers/cncf/kubernetes/cli/kubernetes_command.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/cli/kubernetes_command.py
rename to providers/src/airflow/providers/cncf/kubernetes/cli/kubernetes_command.py
diff --git a/airflow/providers/cncf/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/decorators/__init__.py
similarity index 100%
rename from airflow/providers/cncf/__init__.py
rename to providers/src/airflow/providers/cncf/kubernetes/decorators/__init__.py
diff --git a/airflow/providers/cncf/kubernetes/decorators/kubernetes.py b/providers/src/airflow/providers/cncf/kubernetes/decorators/kubernetes.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/decorators/kubernetes.py
rename to providers/src/airflow/providers/cncf/kubernetes/decorators/kubernetes.py
diff --git a/airflow/providers/cncf/kubernetes/decorators/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/executors/__init__.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/decorators/__init__.py
rename to providers/src/airflow/providers/cncf/kubernetes/executors/__init__.py
diff --git a/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py b/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
rename to providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
diff --git a/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py b/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py
rename to providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py
diff --git a/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py b/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py
rename to providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py
diff --git a/airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py b/providers/src/airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py
rename to providers/src/airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py
diff --git a/airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/hooks/__init__.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py
rename to providers/src/airflow/providers/cncf/kubernetes/hooks/__init__.py
diff --git a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py b/providers/src/airflow/providers/cncf/kubernetes/hooks/kubernetes.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/hooks/kubernetes.py
rename to providers/src/airflow/providers/cncf/kubernetes/hooks/kubernetes.py
diff --git a/airflow/providers/cncf/kubernetes/k8s_model.py b/providers/src/airflow/providers/cncf/kubernetes/k8s_model.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/k8s_model.py
rename to providers/src/airflow/providers/cncf/kubernetes/k8s_model.py
diff --git a/airflow/providers/cncf/kubernetes/kube_client.py b/providers/src/airflow/providers/cncf/kubernetes/kube_client.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/kube_client.py
rename to providers/src/airflow/providers/cncf/kubernetes/kube_client.py
diff --git a/airflow/providers/cncf/kubernetes/kube_config.py b/providers/src/airflow/providers/cncf/kubernetes/kube_config.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/kube_config.py
rename to providers/src/airflow/providers/cncf/kubernetes/kube_config.py
diff --git a/airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py
rename to providers/src/airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py
diff --git a/airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml b/providers/src/airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml
similarity index 100%
rename from airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml
rename to providers/src/airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml
diff --git a/airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py b/providers/src/airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py
rename to providers/src/airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py
diff --git a/airflow/providers/cncf/kubernetes/executors/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/operators/__init__.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/executors/__init__.py
rename to providers/src/airflow/providers/cncf/kubernetes/operators/__init__.py
diff --git a/airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py b/providers/src/airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py
rename to providers/src/airflow/providers/cncf/kubernetes/operators/custom_object_launcher.py
diff --git a/airflow/providers/cncf/kubernetes/operators/job.py b/providers/src/airflow/providers/cncf/kubernetes/operators/job.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/operators/job.py
rename to providers/src/airflow/providers/cncf/kubernetes/operators/job.py
diff --git a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py b/providers/src/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
rename to providers/src/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
diff --git a/airflow/providers/cncf/kubernetes/operators/pod.py b/providers/src/airflow/providers/cncf/kubernetes/operators/pod.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/operators/pod.py
rename to providers/src/airflow/providers/cncf/kubernetes/operators/pod.py
diff --git a/airflow/providers/cncf/kubernetes/operators/resource.py b/providers/src/airflow/providers/cncf/kubernetes/operators/resource.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/operators/resource.py
rename to providers/src/airflow/providers/cncf/kubernetes/operators/resource.py
diff --git a/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py b/providers/src/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py
rename to providers/src/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py
diff --git a/airflow/providers/cncf/kubernetes/pod_generator.py b/providers/src/airflow/providers/cncf/kubernetes/pod_generator.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/pod_generator.py
rename to providers/src/airflow/providers/cncf/kubernetes/pod_generator.py
from airflow/providers/cncf/kubernetes/pod_generator.py rename to providers/src/airflow/providers/cncf/kubernetes/pod_generator.py diff --git a/airflow/providers/cncf/kubernetes/pod_generator_deprecated.py b/providers/src/airflow/providers/cncf/kubernetes/pod_generator_deprecated.py similarity index 100% rename from airflow/providers/cncf/kubernetes/pod_generator_deprecated.py rename to providers/src/airflow/providers/cncf/kubernetes/pod_generator_deprecated.py diff --git a/airflow/providers/cncf/kubernetes/pod_launcher_deprecated.py b/providers/src/airflow/providers/cncf/kubernetes/pod_launcher_deprecated.py similarity index 100% rename from airflow/providers/cncf/kubernetes/pod_launcher_deprecated.py rename to providers/src/airflow/providers/cncf/kubernetes/pod_launcher_deprecated.py diff --git a/airflow/providers/cncf/kubernetes/resource_convert/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py similarity index 100% rename from airflow/providers/cncf/kubernetes/resource_convert/__init__.py rename to providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py diff --git a/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml b/providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml similarity index 100% rename from airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml rename to providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml diff --git a/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml b/providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml similarity index 100% rename from airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml rename to providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml diff --git a/airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml b/providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml similarity index 100% rename from airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml rename to providers/src/airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml diff --git a/airflow/providers/cncf/kubernetes/provider.yaml b/providers/src/airflow/providers/cncf/kubernetes/provider.yaml similarity index 100% rename from airflow/providers/cncf/kubernetes/provider.yaml rename to providers/src/airflow/providers/cncf/kubernetes/provider.yaml diff --git a/airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2 b/providers/src/airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2 similarity index 100% rename from airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2 rename to providers/src/airflow/providers/cncf/kubernetes/python_kubernetes_script.jinja2 diff --git a/airflow/providers/cncf/kubernetes/python_kubernetes_script.py b/providers/src/airflow/providers/cncf/kubernetes/python_kubernetes_script.py similarity index 100% rename from airflow/providers/cncf/kubernetes/python_kubernetes_script.py rename to providers/src/airflow/providers/cncf/kubernetes/python_kubernetes_script.py diff --git a/airflow/providers/cncf/kubernetes/sensors/__init__.py 
b/providers/src/airflow/providers/cncf/kubernetes/resource_convert/__init__.py similarity index 100% rename from airflow/providers/cncf/kubernetes/sensors/__init__.py rename to providers/src/airflow/providers/cncf/kubernetes/resource_convert/__init__.py diff --git a/airflow/providers/cncf/kubernetes/resource_convert/configmap.py b/providers/src/airflow/providers/cncf/kubernetes/resource_convert/configmap.py similarity index 100% rename from airflow/providers/cncf/kubernetes/resource_convert/configmap.py rename to providers/src/airflow/providers/cncf/kubernetes/resource_convert/configmap.py diff --git a/airflow/providers/cncf/kubernetes/resource_convert/env_variable.py b/providers/src/airflow/providers/cncf/kubernetes/resource_convert/env_variable.py similarity index 100% rename from airflow/providers/cncf/kubernetes/resource_convert/env_variable.py rename to providers/src/airflow/providers/cncf/kubernetes/resource_convert/env_variable.py diff --git a/airflow/providers/cncf/kubernetes/resource_convert/secret.py b/providers/src/airflow/providers/cncf/kubernetes/resource_convert/secret.py similarity index 100% rename from airflow/providers/cncf/kubernetes/resource_convert/secret.py rename to providers/src/airflow/providers/cncf/kubernetes/resource_convert/secret.py diff --git a/airflow/providers/cncf/kubernetes/secret.py b/providers/src/airflow/providers/cncf/kubernetes/secret.py similarity index 100% rename from airflow/providers/cncf/kubernetes/secret.py rename to providers/src/airflow/providers/cncf/kubernetes/secret.py diff --git a/airflow/providers/cncf/kubernetes/triggers/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/sensors/__init__.py similarity index 100% rename from airflow/providers/cncf/kubernetes/triggers/__init__.py rename to providers/src/airflow/providers/cncf/kubernetes/sensors/__init__.py diff --git a/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py b/providers/src/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py similarity index 100% rename from airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py rename to providers/src/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py diff --git a/airflow/providers/cncf/kubernetes/template_rendering.py b/providers/src/airflow/providers/cncf/kubernetes/template_rendering.py similarity index 100% rename from airflow/providers/cncf/kubernetes/template_rendering.py rename to providers/src/airflow/providers/cncf/kubernetes/template_rendering.py diff --git a/airflow/providers/cohere/operators/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/triggers/__init__.py similarity index 100% rename from airflow/providers/cohere/operators/__init__.py rename to providers/src/airflow/providers/cncf/kubernetes/triggers/__init__.py diff --git a/airflow/providers/cncf/kubernetes/triggers/job.py b/providers/src/airflow/providers/cncf/kubernetes/triggers/job.py similarity index 100% rename from airflow/providers/cncf/kubernetes/triggers/job.py rename to providers/src/airflow/providers/cncf/kubernetes/triggers/job.py diff --git a/airflow/providers/cncf/kubernetes/triggers/kubernetes_pod.py b/providers/src/airflow/providers/cncf/kubernetes/triggers/kubernetes_pod.py similarity index 100% rename from airflow/providers/cncf/kubernetes/triggers/kubernetes_pod.py rename to providers/src/airflow/providers/cncf/kubernetes/triggers/kubernetes_pod.py diff --git a/airflow/providers/cncf/kubernetes/triggers/pod.py b/providers/src/airflow/providers/cncf/kubernetes/triggers/pod.py similarity index 
100% rename from airflow/providers/cncf/kubernetes/triggers/pod.py rename to providers/src/airflow/providers/cncf/kubernetes/triggers/pod.py diff --git a/airflow/providers/cncf/kubernetes/utils/__init__.py b/providers/src/airflow/providers/cncf/kubernetes/utils/__init__.py similarity index 100% rename from airflow/providers/cncf/kubernetes/utils/__init__.py rename to providers/src/airflow/providers/cncf/kubernetes/utils/__init__.py diff --git a/airflow/providers/cncf/kubernetes/utils/delete_from.py b/providers/src/airflow/providers/cncf/kubernetes/utils/delete_from.py similarity index 100% rename from airflow/providers/cncf/kubernetes/utils/delete_from.py rename to providers/src/airflow/providers/cncf/kubernetes/utils/delete_from.py diff --git a/airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py b/providers/src/airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py similarity index 100% rename from airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py rename to providers/src/airflow/providers/cncf/kubernetes/utils/k8s_resource_iterator.py diff --git a/airflow/providers/cncf/kubernetes/utils/pod_manager.py b/providers/src/airflow/providers/cncf/kubernetes/utils/pod_manager.py similarity index 100% rename from airflow/providers/cncf/kubernetes/utils/pod_manager.py rename to providers/src/airflow/providers/cncf/kubernetes/utils/pod_manager.py diff --git a/airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py b/providers/src/airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py similarity index 100% rename from airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py rename to providers/src/airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py diff --git a/airflow/providers/cohere/.latest-doc-only-change.txt b/providers/src/airflow/providers/cohere/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/cohere/.latest-doc-only-change.txt rename to providers/src/airflow/providers/cohere/.latest-doc-only-change.txt diff --git a/airflow/providers/cohere/CHANGELOG.rst b/providers/src/airflow/providers/cohere/CHANGELOG.rst similarity index 100% rename from airflow/providers/cohere/CHANGELOG.rst rename to providers/src/airflow/providers/cohere/CHANGELOG.rst diff --git a/airflow/providers/cohere/__init__.py b/providers/src/airflow/providers/cohere/__init__.py similarity index 100% rename from airflow/providers/cohere/__init__.py rename to providers/src/airflow/providers/cohere/__init__.py diff --git a/airflow/providers/cohere/hooks/__init__.py b/providers/src/airflow/providers/cohere/hooks/__init__.py similarity index 100% rename from airflow/providers/cohere/hooks/__init__.py rename to providers/src/airflow/providers/cohere/hooks/__init__.py diff --git a/airflow/providers/cohere/hooks/cohere.py b/providers/src/airflow/providers/cohere/hooks/cohere.py similarity index 100% rename from airflow/providers/cohere/hooks/cohere.py rename to providers/src/airflow/providers/cohere/hooks/cohere.py diff --git a/airflow/providers/common/__init__.py b/providers/src/airflow/providers/cohere/operators/__init__.py similarity index 100% rename from airflow/providers/common/__init__.py rename to providers/src/airflow/providers/cohere/operators/__init__.py diff --git a/airflow/providers/cohere/operators/embedding.py b/providers/src/airflow/providers/cohere/operators/embedding.py similarity index 100% rename from airflow/providers/cohere/operators/embedding.py rename to providers/src/airflow/providers/cohere/operators/embedding.py diff --git 
a/airflow/providers/cohere/provider.yaml b/providers/src/airflow/providers/cohere/provider.yaml similarity index 100% rename from airflow/providers/cohere/provider.yaml rename to providers/src/airflow/providers/cohere/provider.yaml diff --git a/airflow/providers/common/compat/.latest-doc-only-change.txt b/providers/src/airflow/providers/common/compat/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/common/compat/.latest-doc-only-change.txt rename to providers/src/airflow/providers/common/compat/.latest-doc-only-change.txt diff --git a/airflow/providers/common/compat/CHANGELOG.rst b/providers/src/airflow/providers/common/compat/CHANGELOG.rst similarity index 100% rename from airflow/providers/common/compat/CHANGELOG.rst rename to providers/src/airflow/providers/common/compat/CHANGELOG.rst diff --git a/airflow/providers/common/compat/__init__.py b/providers/src/airflow/providers/common/compat/__init__.py similarity index 100% rename from airflow/providers/common/compat/__init__.py rename to providers/src/airflow/providers/common/compat/__init__.py diff --git a/airflow/providers/common/compat/assets/__init__.py b/providers/src/airflow/providers/common/compat/assets/__init__.py similarity index 100% rename from airflow/providers/common/compat/assets/__init__.py rename to providers/src/airflow/providers/common/compat/assets/__init__.py diff --git a/airflow/providers/common/compat/lineage/__init__.py b/providers/src/airflow/providers/common/compat/lineage/__init__.py similarity index 100% rename from airflow/providers/common/compat/lineage/__init__.py rename to providers/src/airflow/providers/common/compat/lineage/__init__.py diff --git a/airflow/providers/common/compat/lineage/hook.py b/providers/src/airflow/providers/common/compat/lineage/hook.py similarity index 100% rename from airflow/providers/common/compat/lineage/hook.py rename to providers/src/airflow/providers/common/compat/lineage/hook.py diff --git a/airflow/providers/common/compat/openlineage/__init__.py b/providers/src/airflow/providers/common/compat/openlineage/__init__.py similarity index 100% rename from airflow/providers/common/compat/openlineage/__init__.py rename to providers/src/airflow/providers/common/compat/openlineage/__init__.py diff --git a/airflow/providers/common/compat/openlineage/facet.py b/providers/src/airflow/providers/common/compat/openlineage/facet.py similarity index 100% rename from airflow/providers/common/compat/openlineage/facet.py rename to providers/src/airflow/providers/common/compat/openlineage/facet.py diff --git a/airflow/providers/common/compat/openlineage/utils/__init__.py b/providers/src/airflow/providers/common/compat/openlineage/utils/__init__.py similarity index 100% rename from airflow/providers/common/compat/openlineage/utils/__init__.py rename to providers/src/airflow/providers/common/compat/openlineage/utils/__init__.py diff --git a/airflow/providers/common/compat/openlineage/utils/utils.py b/providers/src/airflow/providers/common/compat/openlineage/utils/utils.py similarity index 100% rename from airflow/providers/common/compat/openlineage/utils/utils.py rename to providers/src/airflow/providers/common/compat/openlineage/utils/utils.py diff --git a/airflow/providers/common/compat/provider.yaml b/providers/src/airflow/providers/common/compat/provider.yaml similarity index 100% rename from airflow/providers/common/compat/provider.yaml rename to providers/src/airflow/providers/common/compat/provider.yaml diff --git 
a/airflow/providers/common/compat/security/__init__.py b/providers/src/airflow/providers/common/compat/security/__init__.py similarity index 100% rename from airflow/providers/common/compat/security/__init__.py rename to providers/src/airflow/providers/common/compat/security/__init__.py diff --git a/airflow/providers/common/compat/security/permissions.py b/providers/src/airflow/providers/common/compat/security/permissions.py similarity index 100% rename from airflow/providers/common/compat/security/permissions.py rename to providers/src/airflow/providers/common/compat/security/permissions.py diff --git a/airflow/providers/common/io/.latest-doc-only-change.txt b/providers/src/airflow/providers/common/io/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/common/io/.latest-doc-only-change.txt rename to providers/src/airflow/providers/common/io/.latest-doc-only-change.txt diff --git a/airflow/providers/common/io/CHANGELOG.rst b/providers/src/airflow/providers/common/io/CHANGELOG.rst similarity index 100% rename from airflow/providers/common/io/CHANGELOG.rst rename to providers/src/airflow/providers/common/io/CHANGELOG.rst diff --git a/airflow/providers/common/io/__init__.py b/providers/src/airflow/providers/common/io/__init__.py similarity index 100% rename from airflow/providers/common/io/__init__.py rename to providers/src/airflow/providers/common/io/__init__.py diff --git a/airflow/providers/common/io/assets/__init__.py b/providers/src/airflow/providers/common/io/assets/__init__.py similarity index 100% rename from airflow/providers/common/io/assets/__init__.py rename to providers/src/airflow/providers/common/io/assets/__init__.py diff --git a/airflow/providers/common/io/operators/__init__.py b/providers/src/airflow/providers/common/io/assets/assets/__init__.py similarity index 100% rename from airflow/providers/common/io/operators/__init__.py rename to providers/src/airflow/providers/common/io/assets/assets/__init__.py diff --git a/airflow/providers/common/io/assets/file.py b/providers/src/airflow/providers/common/io/assets/file.py similarity index 100% rename from airflow/providers/common/io/assets/file.py rename to providers/src/airflow/providers/common/io/assets/file.py diff --git a/airflow/providers/common/sql/hooks/__init__.py b/providers/src/airflow/providers/common/io/operators/__init__.py similarity index 100% rename from airflow/providers/common/sql/hooks/__init__.py rename to providers/src/airflow/providers/common/io/operators/__init__.py diff --git a/airflow/providers/common/io/operators/file_transfer.py b/providers/src/airflow/providers/common/io/operators/file_transfer.py similarity index 100% rename from airflow/providers/common/io/operators/file_transfer.py rename to providers/src/airflow/providers/common/io/operators/file_transfer.py diff --git a/airflow/providers/common/io/provider.yaml b/providers/src/airflow/providers/common/io/provider.yaml similarity index 100% rename from airflow/providers/common/io/provider.yaml rename to providers/src/airflow/providers/common/io/provider.yaml diff --git a/airflow/providers/common/io/xcom/__init__.py b/providers/src/airflow/providers/common/io/xcom/__init__.py similarity index 100% rename from airflow/providers/common/io/xcom/__init__.py rename to providers/src/airflow/providers/common/io/xcom/__init__.py diff --git a/airflow/providers/common/io/xcom/backend.py b/providers/src/airflow/providers/common/io/xcom/backend.py similarity index 100% rename from airflow/providers/common/io/xcom/backend.py rename 
to providers/src/airflow/providers/common/io/xcom/backend.py
diff --git a/airflow/providers/common/sql/.latest-doc-only-change.txt b/providers/src/airflow/providers/common/sql/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/common/sql/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/common/sql/.latest-doc-only-change.txt
diff --git a/airflow/providers/common/sql/CHANGELOG.rst b/providers/src/airflow/providers/common/sql/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/common/sql/CHANGELOG.rst
rename to providers/src/airflow/providers/common/sql/CHANGELOG.rst
diff --git a/airflow/providers/common/sql/README_API.md b/providers/src/airflow/providers/common/sql/README_API.md
similarity index 100%
rename from airflow/providers/common/sql/README_API.md
rename to providers/src/airflow/providers/common/sql/README_API.md
diff --git a/airflow/providers/common/sql/__init__.py b/providers/src/airflow/providers/common/sql/__init__.py
similarity index 100%
rename from airflow/providers/common/sql/__init__.py
rename to providers/src/airflow/providers/common/sql/__init__.py
diff --git a/airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md b/providers/src/airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md
similarity index 100%
rename from airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md
rename to providers/src/airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md
diff --git a/airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md b/providers/src/airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md
similarity index 100%
rename from airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md
rename to providers/src/airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md
diff --git a/airflow/providers/common/sql/operators/__init__.py b/providers/src/airflow/providers/common/sql/hooks/__init__.py
similarity index 100%
rename from airflow/providers/common/sql/operators/__init__.py
rename to providers/src/airflow/providers/common/sql/hooks/__init__.py
diff --git a/airflow/providers/common/sql/hooks/sql.py b/providers/src/airflow/providers/common/sql/hooks/sql.py
similarity index 99%
rename from airflow/providers/common/sql/hooks/sql.py
rename to providers/src/airflow/providers/common/sql/hooks/sql.py
index 7983808d0d579..afb66ddd13a0b 100644
--- a/airflow/providers/common/sql/hooks/sql.py
+++ b/providers/src/airflow/providers/common/sql/hooks/sql.py
@@ -190,7 +190,7 @@ def get_conn_id(self) -> str:
         return getattr(self, self.conn_name_attr)
 
     @cached_property
-    def placeholder(self):
+    def placeholder(self) -> str:
         placeholder = self.connection_extra.get("placeholder")
         if placeholder:
             if placeholder in SQL_PLACEHOLDERS:
@@ -235,9 +235,11 @@ def connection_extra_lower(self) -> dict:
         """
         return {k.lower(): v for k, v in self.connection_extra.items()}
 
-    def get_conn(self):
+    def get_conn(self) -> Any:
         """Return a connection object."""
         db = self.connection
+        if self.connector is None:
+            raise RuntimeError(f"{type(self).__name__} didn't have `self.connector` set!")
         return self.connector.connect(host=db.host, port=db.port, username=db.login, schema=db.schema)
 
     def get_uri(self) -> str:
@@ -570,7 +572,7 @@ def get_autocommit(self, conn) -> bool:
         """
         return getattr(conn, "autocommit", False) and self.supports_autocommit
 
-    def get_cursor(self):
+    def get_cursor(self) -> Any:
         """Return a cursor."""
         return self.get_conn().cursor()
 
diff --git a/airflow/providers/common/sql/hooks/sql.pyi b/providers/src/airflow/providers/common/sql/hooks/sql.pyi
similarity index 92%
rename from airflow/providers/common/sql/hooks/sql.pyi
rename to providers/src/airflow/providers/common/sql/hooks/sql.pyi
index e54b033991412..0039733d966ab 100644
--- a/airflow/providers/common/sql/hooks/sql.pyi
+++ b/providers/src/airflow/providers/common/sql/hooks/sql.pyi
@@ -31,19 +31,14 @@
 Definition of the public interface for airflow.providers.common.sql.hooks.sql
 isort:skip_file
 """
-from _typeshed import Incomplete
-from airflow.exceptions import (
-    AirflowException as AirflowException,
-    AirflowOptionalProviderFeatureException as AirflowOptionalProviderFeatureException,
-    AirflowProviderDeprecationWarning as AirflowProviderDeprecationWarning,
-)
+from _typeshed import Incomplete as Incomplete
 from airflow.hooks.base import BaseHook as BaseHook
 from airflow.models import Connection as Connection
 from airflow.providers.openlineage.extractors import OperatorLineage as OperatorLineage
 from airflow.providers.openlineage.sqlparser import DatabaseInfo as DatabaseInfo
 from functools import cached_property as cached_property
 from pandas import DataFrame as DataFrame
-from sqlalchemy.engine import Inspector, URL as URL
+from sqlalchemy.engine import Inspector as Inspector, URL as URL
 from typing import Any, Callable, Generator, Iterable, Mapping, Protocol, Sequence, TypeVar, overload
 
 T = TypeVar("T")
@@ -67,7 +62,7 @@ class DbApiHook(BaseHook):
     def __init__(self, *args, schema: str | None = None, log_sql: bool = True, **kwargs) -> None: ...
     def get_conn_id(self) -> str: ...
     @cached_property
-    def placeholder(self): ...
+    def placeholder(self) -> str: ...
     @property
     def connection(self) -> Connection: ...
     @connection.setter
@@ -76,7 +71,7 @@ class DbApiHook(BaseHook):
     def connection_extra(self) -> dict: ...
     @cached_property
     def connection_extra_lower(self) -> dict: ...
-    def get_conn(self): ...
+    def get_conn(self) -> Any: ...
     def get_uri(self) -> str: ...
     @property
     def sqlalchemy_url(self) -> URL: ...
@@ -123,7 +118,7 @@ class DbApiHook(BaseHook):
     ) -> tuple | list[tuple] | list[list[tuple] | tuple] | None: ...
     def set_autocommit(self, conn, autocommit) -> None: ...
     def get_autocommit(self, conn) -> bool: ...
-    def get_cursor(self): ...
+    def get_cursor(self) -> Any: ...
     def insert_rows(
         self,
         table,
@@ -138,7 +133,7 @@ class DbApiHook(BaseHook):
     ): ...
     def bulk_dump(self, table, tmp_file) -> None: ...
     def bulk_load(self, table, tmp_file) -> None: ...
-    def test_connection(self): ...
+    def test_connection(self) -> None: ...
     def get_openlineage_database_info(self, connection) -> DatabaseInfo | None: ...
     def get_openlineage_database_dialect(self, connection) -> str: ...
     def get_openlineage_default_schema(self) -> str | None: ...
diff --git a/airflow/providers/common/sql/sensors/__init__.py b/providers/src/airflow/providers/common/sql/operators/__init__.py
similarity index 100%
rename from airflow/providers/common/sql/sensors/__init__.py
rename to providers/src/airflow/providers/common/sql/operators/__init__.py
diff --git a/airflow/providers/common/sql/operators/sql.py b/providers/src/airflow/providers/common/sql/operators/sql.py
similarity index 100%
rename from airflow/providers/common/sql/operators/sql.py
rename to providers/src/airflow/providers/common/sql/operators/sql.py
diff --git a/airflow/providers/common/sql/operators/sql.pyi b/providers/src/airflow/providers/common/sql/operators/sql.pyi
similarity index 92%
rename from airflow/providers/common/sql/operators/sql.pyi
rename to providers/src/airflow/providers/common/sql/operators/sql.pyi
index 0a63ccaa7cc38..1b97cec5023cf 100644
--- a/airflow/providers/common/sql/operators/sql.pyi
+++ b/providers/src/airflow/providers/common/sql/operators/sql.pyi
@@ -31,25 +31,13 @@
 Definition of the public interface for airflow.providers.common.sql.operators.sql
 isort:skip_file
 """
-from _typeshed import Incomplete
-from airflow.exceptions import (
-    AirflowException as AirflowException,
-    AirflowFailException as AirflowFailException,
-)
-from airflow.hooks.base import BaseHook as BaseHook
+from _typeshed import Incomplete as Incomplete
 from airflow.models import BaseOperator as BaseOperator, SkipMixin as SkipMixin
-from airflow.providers.common.sql.hooks.sql import (
-    DbApiHook as DbApiHook,
-    fetch_all_handler as fetch_all_handler,
-    return_single_query_results as return_single_query_results,
-)
+from airflow.providers.common.sql.hooks.sql import DbApiHook as DbApiHook
 from airflow.providers.openlineage.extractors import OperatorLineage as OperatorLineage
 from airflow.utils.context import Context as Context
-from airflow.utils.helpers import merge_dicts as merge_dicts
-from functools import cached_property as cached_property
 from typing import Any, Callable, Iterable, Mapping, Sequence, SupportsAbs
 
-def _parse_boolean(val: str) -> str | bool: ...
 def parse_boolean(val: str) -> str | bool: ...
 
 class BaseSQLOperator(BaseOperator):
@@ -97,7 +85,7 @@ class SQLExecuteQueryOperator(BaseSQLOperator):
         show_return_value_in_logs: bool = False,
         **kwargs,
     ) -> None: ...
-    def execute(self, context): ...
+    def execute(self, context) -> None: ...
     def prepare_template(self) -> None: ...
     def get_openlineage_facets_on_start(self) -> OperatorLineage | None: ...
     def get_openlineage_facets_on_complete(self, task_instance) -> OperatorLineage | None: ...
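The stub rewrite above trims `sql.pyi` to the public interface: the private helper `_parse_boolean` and re-exported imports such as `BaseHook`, `merge_dicts`, and `fetch_all_handler` disappear, leaving `parse_boolean` typed `(val: str) -> str | bool`. A sketch of the contract that signature implies; the accepted value lists below are an assumption for illustration, not taken from the patch (the real implementation lives in airflow.providers.common.sql.operators.sql):

def parse_boolean_sketch(val: str) -> "str | bool":
    # Recognized boolean spellings come back as bool ...
    lowered = val.lower()
    if lowered in ("y", "yes", "t", "true", "on", "1"):
        return True
    if lowered in ("n", "no", "f", "false", "off", "0"):
        return False
    # ... anything else passes through unchanged, hence `str | bool`.
    return val


print(parse_boolean_sketch("True"), parse_boolean_sketch("maybe"))  # True maybe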
diff --git a/airflow/providers/common/sql/provider.yaml b/providers/src/airflow/providers/common/sql/provider.yaml
similarity index 100%
rename from airflow/providers/common/sql/provider.yaml
rename to providers/src/airflow/providers/common/sql/provider.yaml
diff --git a/airflow/providers/databricks/plugins/__init__.py b/providers/src/airflow/providers/common/sql/sensors/__init__.py
similarity index 100%
rename from airflow/providers/databricks/plugins/__init__.py
rename to providers/src/airflow/providers/common/sql/sensors/__init__.py
diff --git a/airflow/providers/common/sql/sensors/sql.py b/providers/src/airflow/providers/common/sql/sensors/sql.py
similarity index 100%
rename from airflow/providers/common/sql/sensors/sql.py
rename to providers/src/airflow/providers/common/sql/sensors/sql.py
diff --git a/airflow/providers/common/sql/sensors/sql.pyi b/providers/src/airflow/providers/common/sql/sensors/sql.pyi
similarity index 91%
rename from airflow/providers/common/sql/sensors/sql.pyi
rename to providers/src/airflow/providers/common/sql/sensors/sql.pyi
index db92f6d6e02a8..4dcbbd0edbc9e 100644
--- a/airflow/providers/common/sql/sensors/sql.pyi
+++ b/providers/src/airflow/providers/common/sql/sensors/sql.pyi
@@ -31,10 +31,7 @@
 Definition of the public interface for airflow.providers.common.sql.sensors.sql
 isort:skip_file
 """
-from _typeshed import Incomplete
-from airflow.exceptions import AirflowException as AirflowException
-from airflow.hooks.base import BaseHook as BaseHook
-from airflow.providers.common.sql.hooks.sql import DbApiHook as DbApiHook
+from _typeshed import Incomplete as Incomplete
 from airflow.sensors.base import BaseSensorOperator as BaseSensorOperator
 from airflow.utils.context import Context as Context
 from typing import Any, Callable, Mapping, Sequence
diff --git a/airflow/providers/databricks/.latest-doc-only-change.txt b/providers/src/airflow/providers/databricks/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/databricks/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/databricks/.latest-doc-only-change.txt
diff --git a/airflow/providers/databricks/CHANGELOG.rst b/providers/src/airflow/providers/databricks/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/databricks/CHANGELOG.rst
rename to providers/src/airflow/providers/databricks/CHANGELOG.rst
diff --git a/airflow/providers/databricks/__init__.py b/providers/src/airflow/providers/databricks/__init__.py
similarity index 100%
rename from airflow/providers/databricks/__init__.py
rename to providers/src/airflow/providers/databricks/__init__.py
diff --git a/airflow/providers/cncf/kubernetes/operators/__init__.py b/providers/src/airflow/providers/databricks/hooks/__init__.py
similarity index 100%
rename from airflow/providers/cncf/kubernetes/operators/__init__.py
rename to providers/src/airflow/providers/databricks/hooks/__init__.py
diff --git a/airflow/providers/databricks/hooks/databricks.py b/providers/src/airflow/providers/databricks/hooks/databricks.py
similarity index 100%
rename from airflow/providers/databricks/hooks/databricks.py
rename to providers/src/airflow/providers/databricks/hooks/databricks.py
diff --git a/airflow/providers/databricks/hooks/databricks_base.py b/providers/src/airflow/providers/databricks/hooks/databricks_base.py
similarity index 100%
rename from airflow/providers/databricks/hooks/databricks_base.py
rename to providers/src/airflow/providers/databricks/hooks/databricks_base.py
diff --git
a/airflow/providers/databricks/hooks/databricks_sql.py b/providers/src/airflow/providers/databricks/hooks/databricks_sql.py similarity index 100% rename from airflow/providers/databricks/hooks/databricks_sql.py rename to providers/src/airflow/providers/databricks/hooks/databricks_sql.py diff --git a/airflow/providers/databricks/hooks/__init__.py b/providers/src/airflow/providers/databricks/operators/__init__.py similarity index 100% rename from airflow/providers/databricks/hooks/__init__.py rename to providers/src/airflow/providers/databricks/operators/__init__.py diff --git a/airflow/providers/databricks/operators/databricks.py b/providers/src/airflow/providers/databricks/operators/databricks.py similarity index 100% rename from airflow/providers/databricks/operators/databricks.py rename to providers/src/airflow/providers/databricks/operators/databricks.py diff --git a/airflow/providers/databricks/operators/databricks_repos.py b/providers/src/airflow/providers/databricks/operators/databricks_repos.py similarity index 100% rename from airflow/providers/databricks/operators/databricks_repos.py rename to providers/src/airflow/providers/databricks/operators/databricks_repos.py diff --git a/airflow/providers/databricks/operators/databricks_sql.py b/providers/src/airflow/providers/databricks/operators/databricks_sql.py similarity index 100% rename from airflow/providers/databricks/operators/databricks_sql.py rename to providers/src/airflow/providers/databricks/operators/databricks_sql.py diff --git a/airflow/providers/databricks/operators/databricks_workflow.py b/providers/src/airflow/providers/databricks/operators/databricks_workflow.py similarity index 100% rename from airflow/providers/databricks/operators/databricks_workflow.py rename to providers/src/airflow/providers/databricks/operators/databricks_workflow.py diff --git a/airflow/providers/databricks/sensors/__init__.py b/providers/src/airflow/providers/databricks/plugins/__init__.py similarity index 100% rename from airflow/providers/databricks/sensors/__init__.py rename to providers/src/airflow/providers/databricks/plugins/__init__.py diff --git a/airflow/providers/databricks/plugins/databricks_workflow.py b/providers/src/airflow/providers/databricks/plugins/databricks_workflow.py similarity index 100% rename from airflow/providers/databricks/plugins/databricks_workflow.py rename to providers/src/airflow/providers/databricks/plugins/databricks_workflow.py diff --git a/airflow/providers/databricks/provider.yaml b/providers/src/airflow/providers/databricks/provider.yaml similarity index 100% rename from airflow/providers/databricks/provider.yaml rename to providers/src/airflow/providers/databricks/provider.yaml diff --git a/airflow/providers/databricks/utils/__init__.py b/providers/src/airflow/providers/databricks/sensors/__init__.py similarity index 100% rename from airflow/providers/databricks/utils/__init__.py rename to providers/src/airflow/providers/databricks/sensors/__init__.py diff --git a/airflow/providers/databricks/sensors/databricks_partition.py b/providers/src/airflow/providers/databricks/sensors/databricks_partition.py similarity index 100% rename from airflow/providers/databricks/sensors/databricks_partition.py rename to providers/src/airflow/providers/databricks/sensors/databricks_partition.py diff --git a/airflow/providers/databricks/sensors/databricks_sql.py b/providers/src/airflow/providers/databricks/sensors/databricks_sql.py similarity index 100% rename from airflow/providers/databricks/sensors/databricks_sql.py 
rename to providers/src/airflow/providers/databricks/sensors/databricks_sql.py diff --git a/airflow/providers/databricks/operators/__init__.py b/providers/src/airflow/providers/databricks/triggers/__init__.py similarity index 100% rename from airflow/providers/databricks/operators/__init__.py rename to providers/src/airflow/providers/databricks/triggers/__init__.py diff --git a/airflow/providers/databricks/triggers/databricks.py b/providers/src/airflow/providers/databricks/triggers/databricks.py similarity index 100% rename from airflow/providers/databricks/triggers/databricks.py rename to providers/src/airflow/providers/databricks/triggers/databricks.py diff --git a/airflow/providers/dbt/__init__.py b/providers/src/airflow/providers/databricks/utils/__init__.py similarity index 100% rename from airflow/providers/dbt/__init__.py rename to providers/src/airflow/providers/databricks/utils/__init__.py diff --git a/airflow/providers/databricks/utils/databricks.py b/providers/src/airflow/providers/databricks/utils/databricks.py similarity index 100% rename from airflow/providers/databricks/utils/databricks.py rename to providers/src/airflow/providers/databricks/utils/databricks.py diff --git a/airflow/providers/datadog/.latest-doc-only-change.txt b/providers/src/airflow/providers/datadog/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/datadog/.latest-doc-only-change.txt rename to providers/src/airflow/providers/datadog/.latest-doc-only-change.txt diff --git a/airflow/providers/datadog/CHANGELOG.rst b/providers/src/airflow/providers/datadog/CHANGELOG.rst similarity index 100% rename from airflow/providers/datadog/CHANGELOG.rst rename to providers/src/airflow/providers/datadog/CHANGELOG.rst diff --git a/airflow/providers/datadog/__init__.py b/providers/src/airflow/providers/datadog/__init__.py similarity index 100% rename from airflow/providers/datadog/__init__.py rename to providers/src/airflow/providers/datadog/__init__.py diff --git a/airflow/providers/databricks/triggers/__init__.py b/providers/src/airflow/providers/datadog/hooks/__init__.py similarity index 100% rename from airflow/providers/databricks/triggers/__init__.py rename to providers/src/airflow/providers/datadog/hooks/__init__.py diff --git a/airflow/providers/datadog/hooks/datadog.py b/providers/src/airflow/providers/datadog/hooks/datadog.py similarity index 100% rename from airflow/providers/datadog/hooks/datadog.py rename to providers/src/airflow/providers/datadog/hooks/datadog.py diff --git a/airflow/providers/datadog/provider.yaml b/providers/src/airflow/providers/datadog/provider.yaml similarity index 100% rename from airflow/providers/datadog/provider.yaml rename to providers/src/airflow/providers/datadog/provider.yaml diff --git a/airflow/providers/datadog/hooks/__init__.py b/providers/src/airflow/providers/datadog/sensors/__init__.py similarity index 100% rename from airflow/providers/datadog/hooks/__init__.py rename to providers/src/airflow/providers/datadog/sensors/__init__.py diff --git a/airflow/providers/datadog/sensors/datadog.py b/providers/src/airflow/providers/datadog/sensors/datadog.py similarity index 100% rename from airflow/providers/datadog/sensors/datadog.py rename to providers/src/airflow/providers/datadog/sensors/datadog.py diff --git a/airflow/providers/dbt/cloud/.latest-doc-only-change.txt b/providers/src/airflow/providers/dbt/cloud/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/dbt/cloud/.latest-doc-only-change.txt rename to 
providers/src/airflow/providers/dbt/cloud/.latest-doc-only-change.txt
diff --git a/airflow/providers/dbt/cloud/CHANGELOG.rst b/providers/src/airflow/providers/dbt/cloud/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/dbt/cloud/CHANGELOG.rst
rename to providers/src/airflow/providers/dbt/cloud/CHANGELOG.rst
diff --git a/airflow/providers/dbt/cloud/__init__.py b/providers/src/airflow/providers/dbt/cloud/__init__.py
similarity index 100%
rename from airflow/providers/dbt/cloud/__init__.py
rename to providers/src/airflow/providers/dbt/cloud/__init__.py
diff --git a/airflow/providers/dbt/cloud/hooks/__init__.py b/providers/src/airflow/providers/dbt/cloud/hooks/__init__.py
similarity index 100%
rename from airflow/providers/dbt/cloud/hooks/__init__.py
rename to providers/src/airflow/providers/dbt/cloud/hooks/__init__.py
diff --git a/airflow/providers/dbt/cloud/hooks/dbt.py b/providers/src/airflow/providers/dbt/cloud/hooks/dbt.py
similarity index 99%
rename from airflow/providers/dbt/cloud/hooks/dbt.py
rename to providers/src/airflow/providers/dbt/cloud/hooks/dbt.py
index 4007054be69c2..7c38001c2a767 100644
--- a/airflow/providers/dbt/cloud/hooks/dbt.py
+++ b/providers/src/airflow/providers/dbt/cloud/hooks/dbt.py
@@ -20,10 +20,11 @@
 import json
 import time
 import warnings
+from collections.abc import Sequence
 from enum import Enum
 from functools import cached_property, wraps
 from inspect import signature
-from typing import TYPE_CHECKING, Any, Callable, Sequence, Set, TypeVar, cast
+from typing import TYPE_CHECKING, Any, Callable, TypeVar, cast
 
 import aiohttp
 from asgiref.sync import sync_to_async
@@ -117,7 +118,7 @@ class DbtCloudJobRunStatus(Enum):
     @classmethod
     def check_is_valid(cls, statuses: int | Sequence[int] | set[int]):
         """Validate input statuses are a known value."""
-        if isinstance(statuses, (Sequence, Set)):
+        if isinstance(statuses, (Sequence, set)):
             for status in statuses:
                 cls(status)
         else:
diff --git a/airflow/providers/dbt/cloud/operators/__init__.py b/providers/src/airflow/providers/dbt/cloud/operators/__init__.py
similarity index 100%
rename from airflow/providers/dbt/cloud/operators/__init__.py
rename to providers/src/airflow/providers/dbt/cloud/operators/__init__.py
diff --git a/airflow/providers/dbt/cloud/operators/dbt.py b/providers/src/airflow/providers/dbt/cloud/operators/dbt.py
similarity index 100%
rename from airflow/providers/dbt/cloud/operators/dbt.py
rename to providers/src/airflow/providers/dbt/cloud/operators/dbt.py
diff --git a/airflow/providers/dbt/cloud/provider.yaml b/providers/src/airflow/providers/dbt/cloud/provider.yaml
similarity index 100%
rename from airflow/providers/dbt/cloud/provider.yaml
rename to providers/src/airflow/providers/dbt/cloud/provider.yaml
diff --git a/airflow/providers/datadog/sensors/__init__.py b/providers/src/airflow/providers/dbt/cloud/sensors/__init__.py
similarity index 100%
rename from airflow/providers/datadog/sensors/__init__.py
rename to providers/src/airflow/providers/dbt/cloud/sensors/__init__.py
diff --git a/airflow/providers/dbt/cloud/sensors/dbt.py b/providers/src/airflow/providers/dbt/cloud/sensors/dbt.py
similarity index 100%
rename from airflow/providers/dbt/cloud/sensors/dbt.py
rename to providers/src/airflow/providers/dbt/cloud/sensors/dbt.py
diff --git a/airflow/providers/dbt/cloud/triggers/__init__.py b/providers/src/airflow/providers/dbt/cloud/triggers/__init__.py
similarity index 100%
rename from airflow/providers/dbt/cloud/triggers/__init__.py
rename to
providers/src/airflow/providers/dbt/cloud/triggers/__init__.py diff --git a/airflow/providers/dbt/cloud/triggers/dbt.py b/providers/src/airflow/providers/dbt/cloud/triggers/dbt.py similarity index 100% rename from airflow/providers/dbt/cloud/triggers/dbt.py rename to providers/src/airflow/providers/dbt/cloud/triggers/dbt.py diff --git a/airflow/providers/dbt/cloud/utils/__init__.py b/providers/src/airflow/providers/dbt/cloud/utils/__init__.py similarity index 100% rename from airflow/providers/dbt/cloud/utils/__init__.py rename to providers/src/airflow/providers/dbt/cloud/utils/__init__.py diff --git a/airflow/providers/dbt/cloud/utils/openlineage.py b/providers/src/airflow/providers/dbt/cloud/utils/openlineage.py similarity index 100% rename from airflow/providers/dbt/cloud/utils/openlineage.py rename to providers/src/airflow/providers/dbt/cloud/utils/openlineage.py diff --git a/airflow/providers/dingding/.latest-doc-only-change.txt b/providers/src/airflow/providers/dingding/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/dingding/.latest-doc-only-change.txt rename to providers/src/airflow/providers/dingding/.latest-doc-only-change.txt diff --git a/airflow/providers/dingding/CHANGELOG.rst b/providers/src/airflow/providers/dingding/CHANGELOG.rst similarity index 100% rename from airflow/providers/dingding/CHANGELOG.rst rename to providers/src/airflow/providers/dingding/CHANGELOG.rst diff --git a/airflow/providers/dingding/__init__.py b/providers/src/airflow/providers/dingding/__init__.py similarity index 100% rename from airflow/providers/dingding/__init__.py rename to providers/src/airflow/providers/dingding/__init__.py diff --git a/airflow/providers/dbt/cloud/sensors/__init__.py b/providers/src/airflow/providers/dingding/hooks/__init__.py similarity index 100% rename from airflow/providers/dbt/cloud/sensors/__init__.py rename to providers/src/airflow/providers/dingding/hooks/__init__.py diff --git a/airflow/providers/dingding/hooks/dingding.py b/providers/src/airflow/providers/dingding/hooks/dingding.py similarity index 100% rename from airflow/providers/dingding/hooks/dingding.py rename to providers/src/airflow/providers/dingding/hooks/dingding.py diff --git a/airflow/providers/dingding/hooks/__init__.py b/providers/src/airflow/providers/dingding/operators/__init__.py similarity index 100% rename from airflow/providers/dingding/hooks/__init__.py rename to providers/src/airflow/providers/dingding/operators/__init__.py diff --git a/airflow/providers/dingding/operators/dingding.py b/providers/src/airflow/providers/dingding/operators/dingding.py similarity index 100% rename from airflow/providers/dingding/operators/dingding.py rename to providers/src/airflow/providers/dingding/operators/dingding.py diff --git a/airflow/providers/dingding/provider.yaml b/providers/src/airflow/providers/dingding/provider.yaml similarity index 100% rename from airflow/providers/dingding/provider.yaml rename to providers/src/airflow/providers/dingding/provider.yaml diff --git a/airflow/providers/discord/.latest-doc-only-change.txt b/providers/src/airflow/providers/discord/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/discord/.latest-doc-only-change.txt rename to providers/src/airflow/providers/discord/.latest-doc-only-change.txt diff --git a/airflow/providers/discord/CHANGELOG.rst b/providers/src/airflow/providers/discord/CHANGELOG.rst similarity index 100% rename from airflow/providers/discord/CHANGELOG.rst rename to 
providers/src/airflow/providers/discord/CHANGELOG.rst diff --git a/airflow/providers/discord/__init__.py b/providers/src/airflow/providers/discord/__init__.py similarity index 100% rename from airflow/providers/discord/__init__.py rename to providers/src/airflow/providers/discord/__init__.py diff --git a/airflow/providers/dingding/operators/__init__.py b/providers/src/airflow/providers/discord/hooks/__init__.py similarity index 100% rename from airflow/providers/dingding/operators/__init__.py rename to providers/src/airflow/providers/discord/hooks/__init__.py diff --git a/airflow/providers/discord/hooks/discord_webhook.py b/providers/src/airflow/providers/discord/hooks/discord_webhook.py similarity index 100% rename from airflow/providers/discord/hooks/discord_webhook.py rename to providers/src/airflow/providers/discord/hooks/discord_webhook.py diff --git a/airflow/providers/discord/hooks/__init__.py b/providers/src/airflow/providers/discord/notifications/__init__.py similarity index 100% rename from airflow/providers/discord/hooks/__init__.py rename to providers/src/airflow/providers/discord/notifications/__init__.py diff --git a/airflow/providers/discord/notifications/discord.py b/providers/src/airflow/providers/discord/notifications/discord.py similarity index 100% rename from airflow/providers/discord/notifications/discord.py rename to providers/src/airflow/providers/discord/notifications/discord.py diff --git a/airflow/providers/discord/notifications/__init__.py b/providers/src/airflow/providers/discord/operators/__init__.py similarity index 100% rename from airflow/providers/discord/notifications/__init__.py rename to providers/src/airflow/providers/discord/operators/__init__.py diff --git a/airflow/providers/discord/operators/discord_webhook.py b/providers/src/airflow/providers/discord/operators/discord_webhook.py similarity index 100% rename from airflow/providers/discord/operators/discord_webhook.py rename to providers/src/airflow/providers/discord/operators/discord_webhook.py diff --git a/airflow/providers/discord/provider.yaml b/providers/src/airflow/providers/discord/provider.yaml similarity index 100% rename from airflow/providers/discord/provider.yaml rename to providers/src/airflow/providers/discord/provider.yaml diff --git a/airflow/providers/docker/.latest-doc-only-change.txt b/providers/src/airflow/providers/docker/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/docker/.latest-doc-only-change.txt rename to providers/src/airflow/providers/docker/.latest-doc-only-change.txt diff --git a/airflow/providers/docker/CHANGELOG.rst b/providers/src/airflow/providers/docker/CHANGELOG.rst similarity index 100% rename from airflow/providers/docker/CHANGELOG.rst rename to providers/src/airflow/providers/docker/CHANGELOG.rst diff --git a/airflow/providers/docker/__init__.py b/providers/src/airflow/providers/docker/__init__.py similarity index 100% rename from airflow/providers/docker/__init__.py rename to providers/src/airflow/providers/docker/__init__.py diff --git a/airflow/providers/discord/operators/__init__.py b/providers/src/airflow/providers/docker/decorators/__init__.py similarity index 100% rename from airflow/providers/discord/operators/__init__.py rename to providers/src/airflow/providers/docker/decorators/__init__.py diff --git a/airflow/providers/docker/decorators/docker.py b/providers/src/airflow/providers/docker/decorators/docker.py similarity index 100% rename from airflow/providers/docker/decorators/docker.py rename to 
providers/src/airflow/providers/docker/decorators/docker.py diff --git a/airflow/providers/docker/exceptions.py b/providers/src/airflow/providers/docker/exceptions.py similarity index 100% rename from airflow/providers/docker/exceptions.py rename to providers/src/airflow/providers/docker/exceptions.py diff --git a/airflow/providers/docker/decorators/__init__.py b/providers/src/airflow/providers/docker/hooks/__init__.py similarity index 100% rename from airflow/providers/docker/decorators/__init__.py rename to providers/src/airflow/providers/docker/hooks/__init__.py diff --git a/airflow/providers/docker/hooks/docker.py b/providers/src/airflow/providers/docker/hooks/docker.py similarity index 100% rename from airflow/providers/docker/hooks/docker.py rename to providers/src/airflow/providers/docker/hooks/docker.py diff --git a/airflow/providers/docker/hooks/__init__.py b/providers/src/airflow/providers/docker/operators/__init__.py similarity index 100% rename from airflow/providers/docker/hooks/__init__.py rename to providers/src/airflow/providers/docker/operators/__init__.py diff --git a/airflow/providers/docker/operators/docker.py b/providers/src/airflow/providers/docker/operators/docker.py similarity index 100% rename from airflow/providers/docker/operators/docker.py rename to providers/src/airflow/providers/docker/operators/docker.py diff --git a/airflow/providers/docker/operators/docker_swarm.py b/providers/src/airflow/providers/docker/operators/docker_swarm.py similarity index 100% rename from airflow/providers/docker/operators/docker_swarm.py rename to providers/src/airflow/providers/docker/operators/docker_swarm.py diff --git a/airflow/providers/docker/provider.yaml b/providers/src/airflow/providers/docker/provider.yaml similarity index 100% rename from airflow/providers/docker/provider.yaml rename to providers/src/airflow/providers/docker/provider.yaml diff --git a/airflow/providers/edge/CHANGELOG.rst b/providers/src/airflow/providers/edge/CHANGELOG.rst similarity index 100% rename from airflow/providers/edge/CHANGELOG.rst rename to providers/src/airflow/providers/edge/CHANGELOG.rst diff --git a/airflow/providers/edge/__init__.py b/providers/src/airflow/providers/edge/__init__.py similarity index 100% rename from airflow/providers/edge/__init__.py rename to providers/src/airflow/providers/edge/__init__.py diff --git a/airflow/providers/edge/api_endpoints/__init__.py b/providers/src/airflow/providers/edge/api_endpoints/__init__.py similarity index 100% rename from airflow/providers/edge/api_endpoints/__init__.py rename to providers/src/airflow/providers/edge/api_endpoints/__init__.py diff --git a/airflow/providers/edge/api_endpoints/health_endpoint.py b/providers/src/airflow/providers/edge/api_endpoints/health_endpoint.py similarity index 100% rename from airflow/providers/edge/api_endpoints/health_endpoint.py rename to providers/src/airflow/providers/edge/api_endpoints/health_endpoint.py diff --git a/airflow/providers/edge/api_endpoints/rpc_api_endpoint.py b/providers/src/airflow/providers/edge/api_endpoints/rpc_api_endpoint.py similarity index 100% rename from airflow/providers/edge/api_endpoints/rpc_api_endpoint.py rename to providers/src/airflow/providers/edge/api_endpoints/rpc_api_endpoint.py diff --git a/airflow/providers/edge/cli/__init__.py b/providers/src/airflow/providers/edge/cli/__init__.py similarity index 100% rename from airflow/providers/edge/cli/__init__.py rename to providers/src/airflow/providers/edge/cli/__init__.py diff --git 
a/airflow/providers/edge/cli/edge_command.py b/providers/src/airflow/providers/edge/cli/edge_command.py
similarity index 100%
rename from airflow/providers/edge/cli/edge_command.py
rename to providers/src/airflow/providers/edge/cli/edge_command.py
diff --git a/airflow/providers/edge/example_dags/__init__.py b/providers/src/airflow/providers/edge/example_dags/__init__.py
similarity index 100%
rename from airflow/providers/edge/example_dags/__init__.py
rename to providers/src/airflow/providers/edge/example_dags/__init__.py
diff --git a/airflow/providers/edge/example_dags/integration_test.py b/providers/src/airflow/providers/edge/example_dags/integration_test.py
similarity index 100%
rename from airflow/providers/edge/example_dags/integration_test.py
rename to providers/src/airflow/providers/edge/example_dags/integration_test.py
diff --git a/airflow/providers/edge/models/__init__.py b/providers/src/airflow/providers/edge/models/__init__.py
similarity index 100%
rename from airflow/providers/edge/models/__init__.py
rename to providers/src/airflow/providers/edge/models/__init__.py
diff --git a/airflow/providers/edge/models/edge_job.py b/providers/src/airflow/providers/edge/models/edge_job.py
similarity index 100%
rename from airflow/providers/edge/models/edge_job.py
rename to providers/src/airflow/providers/edge/models/edge_job.py
diff --git a/airflow/providers/edge/models/edge_logs.py b/providers/src/airflow/providers/edge/models/edge_logs.py
similarity index 100%
rename from airflow/providers/edge/models/edge_logs.py
rename to providers/src/airflow/providers/edge/models/edge_logs.py
diff --git a/airflow/providers/edge/models/edge_worker.py b/providers/src/airflow/providers/edge/models/edge_worker.py
similarity index 100%
rename from airflow/providers/edge/models/edge_worker.py
rename to providers/src/airflow/providers/edge/models/edge_worker.py
diff --git a/airflow/providers/edge/openapi/__init__.py b/providers/src/airflow/providers/edge/openapi/__init__.py
similarity index 100%
rename from airflow/providers/edge/openapi/__init__.py
rename to providers/src/airflow/providers/edge/openapi/__init__.py
diff --git a/airflow/providers/edge/openapi/edge_worker_api_v1.yaml b/providers/src/airflow/providers/edge/openapi/edge_worker_api_v1.yaml
similarity index 100%
rename from airflow/providers/edge/openapi/edge_worker_api_v1.yaml
rename to providers/src/airflow/providers/edge/openapi/edge_worker_api_v1.yaml
diff --git a/airflow/providers/edge/plugins/__init__.py b/providers/src/airflow/providers/edge/plugins/__init__.py
similarity index 100%
rename from airflow/providers/edge/plugins/__init__.py
rename to providers/src/airflow/providers/edge/plugins/__init__.py
diff --git a/airflow/providers/edge/plugins/edge_executor_plugin.py b/providers/src/airflow/providers/edge/plugins/edge_executor_plugin.py
similarity index 100%
rename from airflow/providers/edge/plugins/edge_executor_plugin.py
rename to providers/src/airflow/providers/edge/plugins/edge_executor_plugin.py
diff --git a/airflow/providers/edge/plugins/templates/edge_worker_hosts.html b/providers/src/airflow/providers/edge/plugins/templates/edge_worker_hosts.html
similarity index 100%
rename from airflow/providers/edge/plugins/templates/edge_worker_hosts.html
rename to providers/src/airflow/providers/edge/plugins/templates/edge_worker_hosts.html
diff --git a/airflow/providers/edge/plugins/templates/edge_worker_jobs.html b/providers/src/airflow/providers/edge/plugins/templates/edge_worker_jobs.html
similarity index 100%
rename from airflow/providers/edge/plugins/templates/edge_worker_jobs.html
rename to providers/src/airflow/providers/edge/plugins/templates/edge_worker_jobs.html
diff --git a/airflow/providers/edge/provider.yaml b/providers/src/airflow/providers/edge/provider.yaml
similarity index 100%
rename from airflow/providers/edge/provider.yaml
rename to providers/src/airflow/providers/edge/provider.yaml
diff --git a/airflow/providers/elasticsearch/.latest-doc-only-change.txt b/providers/src/airflow/providers/elasticsearch/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/elasticsearch/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/elasticsearch/.latest-doc-only-change.txt
diff --git a/airflow/providers/elasticsearch/CHANGELOG.rst b/providers/src/airflow/providers/elasticsearch/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/elasticsearch/CHANGELOG.rst
rename to providers/src/airflow/providers/elasticsearch/CHANGELOG.rst
diff --git a/airflow/providers/elasticsearch/__init__.py b/providers/src/airflow/providers/elasticsearch/__init__.py
similarity index 100%
rename from airflow/providers/elasticsearch/__init__.py
rename to providers/src/airflow/providers/elasticsearch/__init__.py
diff --git a/airflow/providers/docker/operators/__init__.py b/providers/src/airflow/providers/elasticsearch/hooks/__init__.py
similarity index 100%
rename from airflow/providers/docker/operators/__init__.py
rename to providers/src/airflow/providers/elasticsearch/hooks/__init__.py
diff --git a/airflow/providers/elasticsearch/hooks/elasticsearch.py b/providers/src/airflow/providers/elasticsearch/hooks/elasticsearch.py
similarity index 100%
rename from airflow/providers/elasticsearch/hooks/elasticsearch.py
rename to providers/src/airflow/providers/elasticsearch/hooks/elasticsearch.py
diff --git a/airflow/providers/elasticsearch/log/__init__.py b/providers/src/airflow/providers/elasticsearch/log/__init__.py
similarity index 100%
rename from airflow/providers/elasticsearch/log/__init__.py
rename to providers/src/airflow/providers/elasticsearch/log/__init__.py
diff --git a/airflow/providers/elasticsearch/log/es_json_formatter.py b/providers/src/airflow/providers/elasticsearch/log/es_json_formatter.py
similarity index 100%
rename from airflow/providers/elasticsearch/log/es_json_formatter.py
rename to providers/src/airflow/providers/elasticsearch/log/es_json_formatter.py
diff --git a/airflow/providers/elasticsearch/log/es_response.py b/providers/src/airflow/providers/elasticsearch/log/es_response.py
similarity index 100%
rename from airflow/providers/elasticsearch/log/es_response.py
rename to providers/src/airflow/providers/elasticsearch/log/es_response.py
diff --git a/airflow/providers/elasticsearch/log/es_task_handler.py b/providers/src/airflow/providers/elasticsearch/log/es_task_handler.py
similarity index 100%
rename from airflow/providers/elasticsearch/log/es_task_handler.py
rename to providers/src/airflow/providers/elasticsearch/log/es_task_handler.py
diff --git a/airflow/providers/elasticsearch/provider.yaml b/providers/src/airflow/providers/elasticsearch/provider.yaml
similarity index 100%
rename from airflow/providers/elasticsearch/provider.yaml
rename to providers/src/airflow/providers/elasticsearch/provider.yaml
diff --git a/airflow/providers/exasol/.latest-doc-only-change.txt b/providers/src/airflow/providers/exasol/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/exasol/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/exasol/.latest-doc-only-change.txt
diff --git a/airflow/providers/exasol/CHANGELOG.rst b/providers/src/airflow/providers/exasol/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/exasol/CHANGELOG.rst
rename to providers/src/airflow/providers/exasol/CHANGELOG.rst
diff --git a/airflow/providers/exasol/__init__.py b/providers/src/airflow/providers/exasol/__init__.py
similarity index 100%
rename from airflow/providers/exasol/__init__.py
rename to providers/src/airflow/providers/exasol/__init__.py
diff --git a/airflow/providers/elasticsearch/hooks/__init__.py b/providers/src/airflow/providers/exasol/hooks/__init__.py
similarity index 100%
rename from airflow/providers/elasticsearch/hooks/__init__.py
rename to providers/src/airflow/providers/exasol/hooks/__init__.py
diff --git a/airflow/providers/exasol/hooks/exasol.py b/providers/src/airflow/providers/exasol/hooks/exasol.py
similarity index 100%
rename from airflow/providers/exasol/hooks/exasol.py
rename to providers/src/airflow/providers/exasol/hooks/exasol.py
diff --git a/airflow/providers/exasol/hooks/__init__.py b/providers/src/airflow/providers/exasol/operators/__init__.py
similarity index 100%
rename from airflow/providers/exasol/hooks/__init__.py
rename to providers/src/airflow/providers/exasol/operators/__init__.py
diff --git a/airflow/providers/exasol/operators/exasol.py b/providers/src/airflow/providers/exasol/operators/exasol.py
similarity index 100%
rename from airflow/providers/exasol/operators/exasol.py
rename to providers/src/airflow/providers/exasol/operators/exasol.py
diff --git a/airflow/providers/exasol/provider.yaml b/providers/src/airflow/providers/exasol/provider.yaml
similarity index 100%
rename from airflow/providers/exasol/provider.yaml
rename to providers/src/airflow/providers/exasol/provider.yaml
diff --git a/airflow/providers/fab/CHANGELOG.rst b/providers/src/airflow/providers/fab/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/fab/CHANGELOG.rst
rename to providers/src/airflow/providers/fab/CHANGELOG.rst
diff --git a/airflow/providers/fab/__init__.py b/providers/src/airflow/providers/fab/__init__.py
similarity index 100%
rename from airflow/providers/fab/__init__.py
rename to providers/src/airflow/providers/fab/__init__.py
diff --git a/airflow/providers/fab/alembic.ini b/providers/src/airflow/providers/fab/alembic.ini
similarity index 100%
rename from airflow/providers/fab/alembic.ini
rename to providers/src/airflow/providers/fab/alembic.ini
diff --git a/airflow/providers/exasol/operators/__init__.py b/providers/src/airflow/providers/fab/auth_manager/__init__.py
similarity index 100%
rename from airflow/providers/exasol/operators/__init__.py
rename to providers/src/airflow/providers/fab/auth_manager/__init__.py
diff --git a/airflow/providers/fab/auth_manager/__init__.py b/providers/src/airflow/providers/fab/auth_manager/api/__init__.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/__init__.py
rename to providers/src/airflow/providers/fab/auth_manager/api/__init__.py
diff --git a/airflow/providers/fab/auth_manager/api/__init__.py b/providers/src/airflow/providers/fab/auth_manager/api/auth/__init__.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/api/__init__.py
rename to providers/src/airflow/providers/fab/auth_manager/api/auth/__init__.py
diff --git a/airflow/providers/fab/auth_manager/api/auth/__init__.py b/providers/src/airflow/providers/fab/auth_manager/api/auth/backend/__init__.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/api/auth/__init__.py
rename to providers/src/airflow/providers/fab/auth_manager/api/auth/backend/__init__.py
diff --git a/airflow/providers/fab/auth_manager/api/auth/backend/basic_auth.py b/providers/src/airflow/providers/fab/auth_manager/api/auth/backend/basic_auth.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/api/auth/backend/basic_auth.py
rename to providers/src/airflow/providers/fab/auth_manager/api/auth/backend/basic_auth.py
diff --git a/airflow/providers/fab/auth_manager/api/auth/backend/kerberos_auth.py b/providers/src/airflow/providers/fab/auth_manager/api/auth/backend/kerberos_auth.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/api/auth/backend/kerberos_auth.py
rename to providers/src/airflow/providers/fab/auth_manager/api/auth/backend/kerberos_auth.py
diff --git a/airflow/providers/fab/auth_manager/api_endpoints/__init__.py b/providers/src/airflow/providers/fab/auth_manager/api_endpoints/__init__.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/api_endpoints/__init__.py
rename to providers/src/airflow/providers/fab/auth_manager/api_endpoints/__init__.py
diff --git a/airflow/providers/fab/auth_manager/api_endpoints/role_and_permission_endpoint.py b/providers/src/airflow/providers/fab/auth_manager/api_endpoints/role_and_permission_endpoint.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/api_endpoints/role_and_permission_endpoint.py
rename to providers/src/airflow/providers/fab/auth_manager/api_endpoints/role_and_permission_endpoint.py
diff --git a/airflow/providers/fab/auth_manager/api_endpoints/user_endpoint.py b/providers/src/airflow/providers/fab/auth_manager/api_endpoints/user_endpoint.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/api_endpoints/user_endpoint.py
rename to providers/src/airflow/providers/fab/auth_manager/api_endpoints/user_endpoint.py
diff --git a/airflow/providers/fab/auth_manager/api/auth/backend/__init__.py b/providers/src/airflow/providers/fab/auth_manager/cli_commands/__init__.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/api/auth/backend/__init__.py
rename to providers/src/airflow/providers/fab/auth_manager/cli_commands/__init__.py
diff --git a/airflow/providers/fab/auth_manager/cli_commands/db_command.py b/providers/src/airflow/providers/fab/auth_manager/cli_commands/db_command.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/cli_commands/db_command.py
rename to providers/src/airflow/providers/fab/auth_manager/cli_commands/db_command.py
diff --git a/airflow/providers/fab/auth_manager/cli_commands/definition.py b/providers/src/airflow/providers/fab/auth_manager/cli_commands/definition.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/cli_commands/definition.py
rename to providers/src/airflow/providers/fab/auth_manager/cli_commands/definition.py
diff --git a/airflow/providers/fab/auth_manager/cli_commands/role_command.py b/providers/src/airflow/providers/fab/auth_manager/cli_commands/role_command.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/cli_commands/role_command.py
rename to providers/src/airflow/providers/fab/auth_manager/cli_commands/role_command.py
diff --git a/airflow/providers/fab/auth_manager/cli_commands/sync_perm_command.py b/providers/src/airflow/providers/fab/auth_manager/cli_commands/sync_perm_command.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/cli_commands/sync_perm_command.py
rename to providers/src/airflow/providers/fab/auth_manager/cli_commands/sync_perm_command.py
diff --git a/airflow/providers/fab/auth_manager/cli_commands/user_command.py b/providers/src/airflow/providers/fab/auth_manager/cli_commands/user_command.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/cli_commands/user_command.py
rename to providers/src/airflow/providers/fab/auth_manager/cli_commands/user_command.py
diff --git a/airflow/providers/fab/auth_manager/cli_commands/utils.py b/providers/src/airflow/providers/fab/auth_manager/cli_commands/utils.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/cli_commands/utils.py
rename to providers/src/airflow/providers/fab/auth_manager/cli_commands/utils.py
diff --git a/airflow/providers/fab/auth_manager/cli_commands/__init__.py b/providers/src/airflow/providers/fab/auth_manager/decorators/__init__.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/cli_commands/__init__.py
rename to providers/src/airflow/providers/fab/auth_manager/decorators/__init__.py
diff --git a/airflow/providers/fab/auth_manager/decorators/auth.py b/providers/src/airflow/providers/fab/auth_manager/decorators/auth.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/decorators/auth.py
rename to providers/src/airflow/providers/fab/auth_manager/decorators/auth.py
diff --git a/airflow/providers/fab/auth_manager/fab_auth_manager.py b/providers/src/airflow/providers/fab/auth_manager/fab_auth_manager.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/fab_auth_manager.py
rename to providers/src/airflow/providers/fab/auth_manager/fab_auth_manager.py
diff --git a/airflow/providers/fab/auth_manager/models/__init__.py b/providers/src/airflow/providers/fab/auth_manager/models/__init__.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/models/__init__.py
rename to providers/src/airflow/providers/fab/auth_manager/models/__init__.py
diff --git a/airflow/providers/fab/auth_manager/models/anonymous_user.py b/providers/src/airflow/providers/fab/auth_manager/models/anonymous_user.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/models/anonymous_user.py
rename to providers/src/airflow/providers/fab/auth_manager/models/anonymous_user.py
diff --git a/airflow/providers/fab/auth_manager/models/db.py b/providers/src/airflow/providers/fab/auth_manager/models/db.py
similarity index 94%
rename from airflow/providers/fab/auth_manager/models/db.py
rename to providers/src/airflow/providers/fab/auth_manager/models/db.py
index efca4a1041827..ce0efef55a1cd 100644
--- a/airflow/providers/fab/auth_manager/models/db.py
+++ b/providers/src/airflow/providers/fab/auth_manager/models/db.py
@@ -16,16 +16,15 @@
 # under the License.
 from __future__ import annotations

-import os
+from pathlib import Path

-import airflow
 from airflow import settings
 from airflow.exceptions import AirflowException
 from airflow.providers.fab.auth_manager.models import metadata
 from airflow.utils.db import _offline_migration, print_happy_cat
 from airflow.utils.db_manager import BaseDBManager

-PACKAGE_DIR = os.path.dirname(airflow.__file__)
+PACKAGE_DIR = Path(__file__).parents[2]

 _REVISION_HEADS_MAP: dict[str, str] = {
     "1.4.0": "6709f7a774b9",
@@ -37,8 +36,8 @@ class FABDBManager(BaseDBManager):
     metadata = metadata
     version_table_name = "alembic_version_fab"

-    migration_dir = os.path.join(PACKAGE_DIR, "providers/fab/migrations")
-    alembic_file = os.path.join(PACKAGE_DIR, "providers/fab/alembic.ini")
+    migration_dir = (PACKAGE_DIR / "migrations").as_posix()
+    alembic_file = (PACKAGE_DIR / "alembic.ini").as_posix()
     supports_table_dropping = True

     def upgradedb(self, to_revision=None, from_revision=None, show_sql_only=False):
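Note on the db.py hunk above: it is the only content change in this run of renames. Once the module lives under providers/src/, deriving paths from airflow.__file__ would point at the wrong tree, so the manager anchors on the moved file itself. A minimal sketch of the same pathlib pattern, assuming the post-move location of db.py (names follow the hunk):

from pathlib import Path

# db.py sits at providers/src/airflow/providers/fab/auth_manager/models/db.py:
# parents[0] -> models/, parents[1] -> auth_manager/, parents[2] -> fab/
PACKAGE_DIR = Path(__file__).parents[2]

# Alembic expects plain string paths; as_posix() keeps the separators
# predictable regardless of the host platform.
migration_dir = (PACKAGE_DIR / "migrations").as_posix()
alembic_file = (PACKAGE_DIR / "alembic.ini").as_posix()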
diff --git a/airflow/providers/fab/auth_manager/openapi/__init__.py b/providers/src/airflow/providers/fab/auth_manager/openapi/__init__.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/openapi/__init__.py
rename to providers/src/airflow/providers/fab/auth_manager/openapi/__init__.py
diff --git a/airflow/providers/fab/auth_manager/openapi/v1.yaml b/providers/src/airflow/providers/fab/auth_manager/openapi/v1.yaml
similarity index 100%
rename from airflow/providers/fab/auth_manager/openapi/v1.yaml
rename to providers/src/airflow/providers/fab/auth_manager/openapi/v1.yaml
diff --git a/airflow/providers/fab/auth_manager/decorators/__init__.py b/providers/src/airflow/providers/fab/auth_manager/security_manager/__init__.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/decorators/__init__.py
rename to providers/src/airflow/providers/fab/auth_manager/security_manager/__init__.py
diff --git a/airflow/providers/fab/auth_manager/security_manager/constants.py b/providers/src/airflow/providers/fab/auth_manager/security_manager/constants.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/security_manager/constants.py
rename to providers/src/airflow/providers/fab/auth_manager/security_manager/constants.py
diff --git a/airflow/providers/fab/auth_manager/security_manager/override.py b/providers/src/airflow/providers/fab/auth_manager/security_manager/override.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/security_manager/override.py
rename to providers/src/airflow/providers/fab/auth_manager/security_manager/override.py
diff --git a/airflow/providers/fab/auth_manager/views/__init__.py b/providers/src/airflow/providers/fab/auth_manager/views/__init__.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/views/__init__.py
rename to providers/src/airflow/providers/fab/auth_manager/views/__init__.py
diff --git a/airflow/providers/fab/auth_manager/views/permissions.py b/providers/src/airflow/providers/fab/auth_manager/views/permissions.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/views/permissions.py
rename to providers/src/airflow/providers/fab/auth_manager/views/permissions.py
diff --git a/airflow/providers/fab/auth_manager/views/roles_list.py b/providers/src/airflow/providers/fab/auth_manager/views/roles_list.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/views/roles_list.py
rename to providers/src/airflow/providers/fab/auth_manager/views/roles_list.py
diff --git a/airflow/providers/fab/auth_manager/views/user.py b/providers/src/airflow/providers/fab/auth_manager/views/user.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/views/user.py
rename to providers/src/airflow/providers/fab/auth_manager/views/user.py
diff --git a/airflow/providers/fab/auth_manager/views/user_edit.py b/providers/src/airflow/providers/fab/auth_manager/views/user_edit.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/views/user_edit.py
rename to providers/src/airflow/providers/fab/auth_manager/views/user_edit.py
diff --git a/airflow/providers/fab/auth_manager/views/user_stats.py b/providers/src/airflow/providers/fab/auth_manager/views/user_stats.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/views/user_stats.py
rename to providers/src/airflow/providers/fab/auth_manager/views/user_stats.py
diff --git a/airflow/providers/fab/migrations/README b/providers/src/airflow/providers/fab/migrations/README
similarity index 100%
rename from airflow/providers/fab/migrations/README
rename to providers/src/airflow/providers/fab/migrations/README
diff --git a/airflow/providers/fab/migrations/__init__.py b/providers/src/airflow/providers/fab/migrations/__init__.py
similarity index 100%
rename from airflow/providers/fab/migrations/__init__.py
rename to providers/src/airflow/providers/fab/migrations/__init__.py
diff --git a/airflow/providers/fab/migrations/env.py b/providers/src/airflow/providers/fab/migrations/env.py
similarity index 100%
rename from airflow/providers/fab/migrations/env.py
rename to providers/src/airflow/providers/fab/migrations/env.py
diff --git a/airflow/providers/fab/migrations/script.py.mako b/providers/src/airflow/providers/fab/migrations/script.py.mako
similarity index 100%
rename from airflow/providers/fab/migrations/script.py.mako
rename to providers/src/airflow/providers/fab/migrations/script.py.mako
diff --git a/airflow/providers/fab/migrations/versions/0001_1_4_0_placeholder_migration.py b/providers/src/airflow/providers/fab/migrations/versions/0001_1_4_0_placeholder_migration.py
similarity index 100%
rename from airflow/providers/fab/migrations/versions/0001_1_4_0_placeholder_migration.py
rename to providers/src/airflow/providers/fab/migrations/versions/0001_1_4_0_placeholder_migration.py
diff --git a/airflow/providers/fab/migrations/versions/__init__.py b/providers/src/airflow/providers/fab/migrations/versions/__init__.py
similarity index 100%
rename from airflow/providers/fab/migrations/versions/__init__.py
rename to providers/src/airflow/providers/fab/migrations/versions/__init__.py
diff --git a/airflow/providers/fab/provider.yaml b/providers/src/airflow/providers/fab/provider.yaml
similarity index 100%
rename from airflow/providers/fab/provider.yaml
rename to providers/src/airflow/providers/fab/provider.yaml
diff --git a/airflow/providers/facebook/.latest-doc-only-change.txt b/providers/src/airflow/providers/facebook/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/facebook/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/facebook/.latest-doc-only-change.txt
diff --git a/airflow/providers/facebook/CHANGELOG.rst b/providers/src/airflow/providers/facebook/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/facebook/CHANGELOG.rst
rename to providers/src/airflow/providers/facebook/CHANGELOG.rst
diff --git a/airflow/providers/facebook/__init__.py b/providers/src/airflow/providers/facebook/__init__.py
similarity index 100%
rename from airflow/providers/facebook/__init__.py
rename to providers/src/airflow/providers/facebook/__init__.py
diff --git a/airflow/providers/facebook/ads/__init__.py b/providers/src/airflow/providers/facebook/ads/__init__.py
similarity index 100%
rename from airflow/providers/facebook/ads/__init__.py
rename to providers/src/airflow/providers/facebook/ads/__init__.py
diff --git a/airflow/providers/facebook/ads/hooks/__init__.py b/providers/src/airflow/providers/facebook/ads/hooks/__init__.py
similarity index 100%
rename from airflow/providers/facebook/ads/hooks/__init__.py
rename to providers/src/airflow/providers/facebook/ads/hooks/__init__.py
diff --git a/airflow/providers/facebook/ads/hooks/ads.py b/providers/src/airflow/providers/facebook/ads/hooks/ads.py
similarity index 100%
rename from airflow/providers/facebook/ads/hooks/ads.py
rename to providers/src/airflow/providers/facebook/ads/hooks/ads.py
diff --git a/airflow/providers/facebook/provider.yaml b/providers/src/airflow/providers/facebook/provider.yaml
similarity index 100%
rename from airflow/providers/facebook/provider.yaml
rename to providers/src/airflow/providers/facebook/provider.yaml
diff --git a/airflow/providers/ftp/.latest-doc-only-change.txt b/providers/src/airflow/providers/ftp/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/ftp/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/ftp/.latest-doc-only-change.txt
diff --git a/airflow/providers/ftp/CHANGELOG.rst b/providers/src/airflow/providers/ftp/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/ftp/CHANGELOG.rst
rename to providers/src/airflow/providers/ftp/CHANGELOG.rst
diff --git a/airflow/providers/ftp/__init__.py b/providers/src/airflow/providers/ftp/__init__.py
similarity index 100%
rename from airflow/providers/ftp/__init__.py
rename to providers/src/airflow/providers/ftp/__init__.py
diff --git a/airflow/providers/fab/auth_manager/security_manager/__init__.py b/providers/src/airflow/providers/ftp/hooks/__init__.py
similarity index 100%
rename from airflow/providers/fab/auth_manager/security_manager/__init__.py
rename to providers/src/airflow/providers/ftp/hooks/__init__.py
diff --git a/airflow/providers/ftp/hooks/ftp.py b/providers/src/airflow/providers/ftp/hooks/ftp.py
similarity index 100%
rename from airflow/providers/ftp/hooks/ftp.py
rename to providers/src/airflow/providers/ftp/hooks/ftp.py
diff --git a/airflow/providers/ftp/operators/__init__.py b/providers/src/airflow/providers/ftp/operators/__init__.py
similarity index 100%
rename from airflow/providers/ftp/operators/__init__.py
rename to providers/src/airflow/providers/ftp/operators/__init__.py
diff --git a/airflow/providers/ftp/operators/ftp.py b/providers/src/airflow/providers/ftp/operators/ftp.py
similarity index 100%
rename from airflow/providers/ftp/operators/ftp.py
rename to providers/src/airflow/providers/ftp/operators/ftp.py
diff --git a/airflow/providers/ftp/provider.yaml b/providers/src/airflow/providers/ftp/provider.yaml
similarity index 100%
rename from airflow/providers/ftp/provider.yaml
rename to providers/src/airflow/providers/ftp/provider.yaml
diff --git a/airflow/providers/ftp/hooks/__init__.py b/providers/src/airflow/providers/ftp/sensors/__init__.py
similarity index 100%
rename from airflow/providers/ftp/hooks/__init__.py
rename to providers/src/airflow/providers/ftp/sensors/__init__.py
diff --git a/airflow/providers/ftp/sensors/ftp.py b/providers/src/airflow/providers/ftp/sensors/ftp.py
similarity index 100%
rename from airflow/providers/ftp/sensors/ftp.py
rename to providers/src/airflow/providers/ftp/sensors/ftp.py
diff --git a/airflow/providers/github/.latest-doc-only-change.txt b/providers/src/airflow/providers/github/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/github/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/github/.latest-doc-only-change.txt
diff --git a/airflow/providers/github/CHANGELOG.rst b/providers/src/airflow/providers/github/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/github/CHANGELOG.rst
rename to providers/src/airflow/providers/github/CHANGELOG.rst
diff --git a/airflow/providers/github/__init__.py b/providers/src/airflow/providers/github/__init__.py
similarity index 100%
rename from airflow/providers/github/__init__.py
rename to providers/src/airflow/providers/github/__init__.py
diff --git a/airflow/providers/github/hooks/__init__.py b/providers/src/airflow/providers/github/hooks/__init__.py
similarity index 100%
rename from airflow/providers/github/hooks/__init__.py
rename to providers/src/airflow/providers/github/hooks/__init__.py
diff --git a/airflow/providers/github/hooks/github.py b/providers/src/airflow/providers/github/hooks/github.py
similarity index 100%
rename from airflow/providers/github/hooks/github.py
rename to providers/src/airflow/providers/github/hooks/github.py
diff --git a/airflow/providers/github/operators/__init__.py b/providers/src/airflow/providers/github/operators/__init__.py
similarity index 100%
rename from airflow/providers/github/operators/__init__.py
rename to providers/src/airflow/providers/github/operators/__init__.py
diff --git a/airflow/providers/github/operators/github.py b/providers/src/airflow/providers/github/operators/github.py
similarity index 100%
rename from airflow/providers/github/operators/github.py
rename to providers/src/airflow/providers/github/operators/github.py
diff --git a/airflow/providers/github/provider.yaml b/providers/src/airflow/providers/github/provider.yaml
similarity index 100%
rename from airflow/providers/github/provider.yaml
rename to providers/src/airflow/providers/github/provider.yaml
diff --git a/airflow/providers/github/sensors/__init__.py b/providers/src/airflow/providers/github/sensors/__init__.py
similarity index 100%
rename from airflow/providers/github/sensors/__init__.py
rename to providers/src/airflow/providers/github/sensors/__init__.py
diff --git a/airflow/providers/github/sensors/github.py b/providers/src/airflow/providers/github/sensors/github.py
similarity index 100%
rename from airflow/providers/github/sensors/github.py
rename to providers/src/airflow/providers/github/sensors/github.py
diff --git a/airflow/providers/google/.latest-doc-only-change.txt b/providers/src/airflow/providers/google/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/google/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/google/.latest-doc-only-change.txt
diff --git a/airflow/providers/google/CHANGELOG.rst b/providers/src/airflow/providers/google/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/google/CHANGELOG.rst
rename to providers/src/airflow/providers/google/CHANGELOG.rst
diff --git a/airflow/providers/google/__init__.py b/providers/src/airflow/providers/google/__init__.py
similarity index 100%
rename from airflow/providers/google/__init__.py
rename to providers/src/airflow/providers/google/__init__.py
diff --git a/airflow/providers/google/ads/.gitignore b/providers/src/airflow/providers/google/ads/.gitignore
similarity index 100%
rename from airflow/providers/google/ads/.gitignore
rename to providers/src/airflow/providers/google/ads/.gitignore
diff --git a/airflow/providers/google/ads/__init__.py b/providers/src/airflow/providers/google/ads/__init__.py
similarity index 100%
rename from airflow/providers/google/ads/__init__.py
rename to providers/src/airflow/providers/google/ads/__init__.py
diff --git a/airflow/providers/google/ads/hooks/__init__.py b/providers/src/airflow/providers/google/ads/hooks/__init__.py
similarity index 100%
rename from airflow/providers/google/ads/hooks/__init__.py
rename to providers/src/airflow/providers/google/ads/hooks/__init__.py
diff --git a/airflow/providers/google/ads/hooks/ads.py b/providers/src/airflow/providers/google/ads/hooks/ads.py
similarity index 100%
rename from airflow/providers/google/ads/hooks/ads.py
rename to providers/src/airflow/providers/google/ads/hooks/ads.py
diff --git a/airflow/providers/google/ads/operators/__init__.py b/providers/src/airflow/providers/google/ads/operators/__init__.py
similarity index 100%
rename from airflow/providers/google/ads/operators/__init__.py
rename to providers/src/airflow/providers/google/ads/operators/__init__.py
diff --git a/airflow/providers/google/ads/operators/ads.py b/providers/src/airflow/providers/google/ads/operators/ads.py
similarity index 100%
rename from airflow/providers/google/ads/operators/ads.py
rename to providers/src/airflow/providers/google/ads/operators/ads.py
diff --git a/airflow/providers/google/ads/transfers/__init__.py b/providers/src/airflow/providers/google/ads/transfers/__init__.py
similarity index 100%
rename from airflow/providers/google/ads/transfers/__init__.py
rename to providers/src/airflow/providers/google/ads/transfers/__init__.py
diff --git a/airflow/providers/google/ads/transfers/ads_to_gcs.py b/providers/src/airflow/providers/google/ads/transfers/ads_to_gcs.py
similarity index 100%
rename from airflow/providers/google/ads/transfers/ads_to_gcs.py
rename to providers/src/airflow/providers/google/ads/transfers/ads_to_gcs.py
diff --git a/airflow/providers/google/cloud/__init__.py b/providers/src/airflow/providers/google/cloud/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/__init__.py
rename to providers/src/airflow/providers/google/cloud/__init__.py
diff --git a/airflow/providers/google/cloud/_internal_client/__init__.py b/providers/src/airflow/providers/google/cloud/_internal_client/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/_internal_client/__init__.py
rename to providers/src/airflow/providers/google/cloud/_internal_client/__init__.py
diff --git a/airflow/providers/google/cloud/_internal_client/secret_manager_client.py b/providers/src/airflow/providers/google/cloud/_internal_client/secret_manager_client.py
similarity index 100%
rename from airflow/providers/google/cloud/_internal_client/secret_manager_client.py
rename to providers/src/airflow/providers/google/cloud/_internal_client/secret_manager_client.py
diff --git a/airflow/providers/google/cloud/example_dags/__init__.py b/providers/src/airflow/providers/google/cloud/example_dags/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/example_dags/__init__.py
rename to providers/src/airflow/providers/google/cloud/example_dags/__init__.py
diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_task.py b/providers/src/airflow/providers/google/cloud/example_dags/example_cloud_task.py
similarity index 100%
rename from airflow/providers/google/cloud/example_dags/example_cloud_task.py
rename to providers/src/airflow/providers/google/cloud/example_dags/example_cloud_task.py
diff --git a/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py b/providers/src/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py
diff --git a/airflow/providers/google/cloud/example_dags/example_looker.py b/providers/src/airflow/providers/google/cloud/example_dags/example_looker.py
similarity index 100%
rename from airflow/providers/google/cloud/example_dags/example_looker.py
rename to providers/src/airflow/providers/google/cloud/example_dags/example_looker.py
diff --git a/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py b/providers/src/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py
diff --git a/airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py b/providers/src/airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py
rename to providers/src/airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py
diff --git a/airflow/providers/google/cloud/fs/__init__.py b/providers/src/airflow/providers/google/cloud/fs/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/fs/__init__.py
rename to providers/src/airflow/providers/google/cloud/fs/__init__.py
diff --git a/airflow/providers/google/cloud/fs/gcs.py b/providers/src/airflow/providers/google/cloud/fs/gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/fs/gcs.py
rename to providers/src/airflow/providers/google/cloud/fs/gcs.py
diff --git a/airflow/providers/google/cloud/hooks/__init__.py b/providers/src/airflow/providers/google/cloud/hooks/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/__init__.py
rename to providers/src/airflow/providers/google/cloud/hooks/__init__.py
diff --git a/airflow/providers/google/cloud/hooks/automl.py b/providers/src/airflow/providers/google/cloud/hooks/automl.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/automl.py
rename to providers/src/airflow/providers/google/cloud/hooks/automl.py
diff --git a/airflow/providers/google/cloud/hooks/bigquery.py b/providers/src/airflow/providers/google/cloud/hooks/bigquery.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/bigquery.py
rename to providers/src/airflow/providers/google/cloud/hooks/bigquery.py
diff --git a/airflow/providers/google/cloud/hooks/bigquery_dts.py b/providers/src/airflow/providers/google/cloud/hooks/bigquery_dts.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/bigquery_dts.py
rename to providers/src/airflow/providers/google/cloud/hooks/bigquery_dts.py
diff --git a/airflow/providers/google/cloud/hooks/bigtable.py b/providers/src/airflow/providers/google/cloud/hooks/bigtable.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/bigtable.py
rename to providers/src/airflow/providers/google/cloud/hooks/bigtable.py
diff --git a/airflow/providers/google/cloud/hooks/cloud_batch.py b/providers/src/airflow/providers/google/cloud/hooks/cloud_batch.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/cloud_batch.py
rename to providers/src/airflow/providers/google/cloud/hooks/cloud_batch.py
diff --git a/airflow/providers/google/cloud/hooks/cloud_build.py b/providers/src/airflow/providers/google/cloud/hooks/cloud_build.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/cloud_build.py
rename to providers/src/airflow/providers/google/cloud/hooks/cloud_build.py
diff --git a/airflow/providers/google/cloud/hooks/cloud_composer.py b/providers/src/airflow/providers/google/cloud/hooks/cloud_composer.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/cloud_composer.py
rename to providers/src/airflow/providers/google/cloud/hooks/cloud_composer.py
diff --git a/airflow/providers/google/cloud/hooks/cloud_memorystore.py b/providers/src/airflow/providers/google/cloud/hooks/cloud_memorystore.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/cloud_memorystore.py
rename to providers/src/airflow/providers/google/cloud/hooks/cloud_memorystore.py
diff --git a/airflow/providers/google/cloud/hooks/cloud_run.py b/providers/src/airflow/providers/google/cloud/hooks/cloud_run.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/cloud_run.py
rename to providers/src/airflow/providers/google/cloud/hooks/cloud_run.py
diff --git a/airflow/providers/google/cloud/hooks/cloud_sql.py b/providers/src/airflow/providers/google/cloud/hooks/cloud_sql.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/cloud_sql.py
rename to providers/src/airflow/providers/google/cloud/hooks/cloud_sql.py
diff --git a/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py b/providers/src/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py
rename to providers/src/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py
diff --git a/airflow/providers/google/cloud/hooks/compute.py b/providers/src/airflow/providers/google/cloud/hooks/compute.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/compute.py
rename to providers/src/airflow/providers/google/cloud/hooks/compute.py
diff --git a/airflow/providers/google/cloud/hooks/compute_ssh.py b/providers/src/airflow/providers/google/cloud/hooks/compute_ssh.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/compute_ssh.py
rename to providers/src/airflow/providers/google/cloud/hooks/compute_ssh.py
diff --git a/airflow/providers/google/cloud/hooks/datacatalog.py b/providers/src/airflow/providers/google/cloud/hooks/datacatalog.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/datacatalog.py
rename to providers/src/airflow/providers/google/cloud/hooks/datacatalog.py
diff --git a/airflow/providers/google/cloud/hooks/dataflow.py b/providers/src/airflow/providers/google/cloud/hooks/dataflow.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/dataflow.py
rename to providers/src/airflow/providers/google/cloud/hooks/dataflow.py
diff --git a/airflow/providers/google/cloud/hooks/dataform.py b/providers/src/airflow/providers/google/cloud/hooks/dataform.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/dataform.py
rename to providers/src/airflow/providers/google/cloud/hooks/dataform.py
diff --git a/airflow/providers/google/cloud/hooks/datafusion.py b/providers/src/airflow/providers/google/cloud/hooks/datafusion.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/datafusion.py
rename to providers/src/airflow/providers/google/cloud/hooks/datafusion.py
diff --git a/airflow/providers/google/cloud/hooks/datapipeline.py b/providers/src/airflow/providers/google/cloud/hooks/datapipeline.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/datapipeline.py
rename to providers/src/airflow/providers/google/cloud/hooks/datapipeline.py
diff --git a/airflow/providers/google/cloud/hooks/dataplex.py b/providers/src/airflow/providers/google/cloud/hooks/dataplex.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/dataplex.py
rename to providers/src/airflow/providers/google/cloud/hooks/dataplex.py
diff --git a/airflow/providers/google/cloud/hooks/dataprep.py b/providers/src/airflow/providers/google/cloud/hooks/dataprep.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/dataprep.py
rename to providers/src/airflow/providers/google/cloud/hooks/dataprep.py
diff --git a/airflow/providers/google/cloud/hooks/dataproc.py b/providers/src/airflow/providers/google/cloud/hooks/dataproc.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/dataproc.py
rename to providers/src/airflow/providers/google/cloud/hooks/dataproc.py
diff --git a/airflow/providers/google/cloud/hooks/dataproc_metastore.py b/providers/src/airflow/providers/google/cloud/hooks/dataproc_metastore.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/dataproc_metastore.py
rename to providers/src/airflow/providers/google/cloud/hooks/dataproc_metastore.py
diff --git a/airflow/providers/google/cloud/hooks/datastore.py b/providers/src/airflow/providers/google/cloud/hooks/datastore.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/datastore.py
rename to providers/src/airflow/providers/google/cloud/hooks/datastore.py
diff --git a/airflow/providers/google/cloud/hooks/dlp.py b/providers/src/airflow/providers/google/cloud/hooks/dlp.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/dlp.py
rename to providers/src/airflow/providers/google/cloud/hooks/dlp.py
diff --git a/airflow/providers/google/cloud/hooks/functions.py b/providers/src/airflow/providers/google/cloud/hooks/functions.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/functions.py
rename to providers/src/airflow/providers/google/cloud/hooks/functions.py
diff --git a/airflow/providers/google/cloud/hooks/gcs.py b/providers/src/airflow/providers/google/cloud/hooks/gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/gcs.py
rename to providers/src/airflow/providers/google/cloud/hooks/gcs.py
diff --git a/airflow/providers/google/cloud/hooks/gdm.py b/providers/src/airflow/providers/google/cloud/hooks/gdm.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/gdm.py
rename to providers/src/airflow/providers/google/cloud/hooks/gdm.py
diff --git a/airflow/providers/google/cloud/hooks/kms.py b/providers/src/airflow/providers/google/cloud/hooks/kms.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/kms.py
rename to providers/src/airflow/providers/google/cloud/hooks/kms.py
diff --git a/airflow/providers/google/cloud/hooks/kubernetes_engine.py b/providers/src/airflow/providers/google/cloud/hooks/kubernetes_engine.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/kubernetes_engine.py
rename to providers/src/airflow/providers/google/cloud/hooks/kubernetes_engine.py
diff --git a/airflow/providers/google/cloud/hooks/life_sciences.py b/providers/src/airflow/providers/google/cloud/hooks/life_sciences.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/life_sciences.py
rename to providers/src/airflow/providers/google/cloud/hooks/life_sciences.py
diff --git a/airflow/providers/google/cloud/hooks/looker.py b/providers/src/airflow/providers/google/cloud/hooks/looker.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/looker.py
rename to providers/src/airflow/providers/google/cloud/hooks/looker.py
diff --git a/airflow/providers/google/cloud/hooks/mlengine.py b/providers/src/airflow/providers/google/cloud/hooks/mlengine.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/mlengine.py
rename to providers/src/airflow/providers/google/cloud/hooks/mlengine.py
diff --git a/airflow/providers/google/cloud/hooks/natural_language.py b/providers/src/airflow/providers/google/cloud/hooks/natural_language.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/natural_language.py
rename to providers/src/airflow/providers/google/cloud/hooks/natural_language.py
diff --git a/airflow/providers/google/cloud/hooks/os_login.py b/providers/src/airflow/providers/google/cloud/hooks/os_login.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/os_login.py
rename to providers/src/airflow/providers/google/cloud/hooks/os_login.py
diff --git a/airflow/providers/google/cloud/hooks/pubsub.py b/providers/src/airflow/providers/google/cloud/hooks/pubsub.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/pubsub.py
rename to providers/src/airflow/providers/google/cloud/hooks/pubsub.py
diff --git a/airflow/providers/google/cloud/hooks/secret_manager.py b/providers/src/airflow/providers/google/cloud/hooks/secret_manager.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/secret_manager.py
rename to providers/src/airflow/providers/google/cloud/hooks/secret_manager.py
diff --git a/airflow/providers/google/cloud/hooks/spanner.py b/providers/src/airflow/providers/google/cloud/hooks/spanner.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/spanner.py
rename to providers/src/airflow/providers/google/cloud/hooks/spanner.py
diff --git a/airflow/providers/google/cloud/hooks/speech_to_text.py b/providers/src/airflow/providers/google/cloud/hooks/speech_to_text.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/speech_to_text.py
rename to providers/src/airflow/providers/google/cloud/hooks/speech_to_text.py
diff --git a/airflow/providers/google/cloud/hooks/stackdriver.py b/providers/src/airflow/providers/google/cloud/hooks/stackdriver.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/stackdriver.py
rename to providers/src/airflow/providers/google/cloud/hooks/stackdriver.py
diff --git a/airflow/providers/google/cloud/hooks/tasks.py b/providers/src/airflow/providers/google/cloud/hooks/tasks.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/tasks.py
rename to providers/src/airflow/providers/google/cloud/hooks/tasks.py
diff --git a/airflow/providers/google/cloud/hooks/text_to_speech.py b/providers/src/airflow/providers/google/cloud/hooks/text_to_speech.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/text_to_speech.py
rename to providers/src/airflow/providers/google/cloud/hooks/text_to_speech.py
diff --git a/airflow/providers/google/cloud/hooks/translate.py b/providers/src/airflow/providers/google/cloud/hooks/translate.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/translate.py
rename to providers/src/airflow/providers/google/cloud/hooks/translate.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/__init__.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/__init__.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/__init__.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/dataset.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/dataset.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/dataset.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/dataset.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/model_service.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/model_service.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/model_service.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/model_service.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/prediction_service.py b/providers/src/airflow/providers/google/cloud/hooks/vertex_ai/prediction_service.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vertex_ai/prediction_service.py
rename to providers/src/airflow/providers/google/cloud/hooks/vertex_ai/prediction_service.py
diff --git a/airflow/providers/google/cloud/hooks/video_intelligence.py b/providers/src/airflow/providers/google/cloud/hooks/video_intelligence.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/video_intelligence.py
rename to providers/src/airflow/providers/google/cloud/hooks/video_intelligence.py
diff --git a/airflow/providers/google/cloud/hooks/vision.py b/providers/src/airflow/providers/google/cloud/hooks/vision.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/vision.py
rename to providers/src/airflow/providers/google/cloud/hooks/vision.py
diff --git a/airflow/providers/google/cloud/hooks/workflows.py b/providers/src/airflow/providers/google/cloud/hooks/workflows.py
similarity index 100%
rename from airflow/providers/google/cloud/hooks/workflows.py
rename to providers/src/airflow/providers/google/cloud/hooks/workflows.py
diff --git a/airflow/providers/google/cloud/links/__init__.py b/providers/src/airflow/providers/google/cloud/links/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/links/__init__.py
rename to providers/src/airflow/providers/google/cloud/links/__init__.py
diff --git a/airflow/providers/google/cloud/links/automl.py b/providers/src/airflow/providers/google/cloud/links/automl.py
similarity index 100%
rename from airflow/providers/google/cloud/links/automl.py
rename to providers/src/airflow/providers/google/cloud/links/automl.py
diff --git a/airflow/providers/google/cloud/links/base.py b/providers/src/airflow/providers/google/cloud/links/base.py
similarity index 100%
rename from airflow/providers/google/cloud/links/base.py
rename to providers/src/airflow/providers/google/cloud/links/base.py
diff --git a/airflow/providers/google/cloud/links/bigquery.py b/providers/src/airflow/providers/google/cloud/links/bigquery.py
similarity index 100%
rename from airflow/providers/google/cloud/links/bigquery.py
rename to providers/src/airflow/providers/google/cloud/links/bigquery.py
diff --git a/airflow/providers/google/cloud/links/bigquery_dts.py b/providers/src/airflow/providers/google/cloud/links/bigquery_dts.py
similarity index 100%
rename from airflow/providers/google/cloud/links/bigquery_dts.py
rename to providers/src/airflow/providers/google/cloud/links/bigquery_dts.py
diff --git a/airflow/providers/google/cloud/links/bigtable.py b/providers/src/airflow/providers/google/cloud/links/bigtable.py
similarity index 100%
rename from airflow/providers/google/cloud/links/bigtable.py
rename to providers/src/airflow/providers/google/cloud/links/bigtable.py
diff --git a/airflow/providers/google/cloud/links/cloud_build.py b/providers/src/airflow/providers/google/cloud/links/cloud_build.py
similarity index 100%
rename from airflow/providers/google/cloud/links/cloud_build.py
rename to providers/src/airflow/providers/google/cloud/links/cloud_build.py
diff --git a/airflow/providers/google/cloud/links/cloud_functions.py b/providers/src/airflow/providers/google/cloud/links/cloud_functions.py
similarity index 100%
rename from airflow/providers/google/cloud/links/cloud_functions.py
rename to providers/src/airflow/providers/google/cloud/links/cloud_functions.py
diff --git a/airflow/providers/google/cloud/links/cloud_memorystore.py b/providers/src/airflow/providers/google/cloud/links/cloud_memorystore.py
similarity index 100%
rename from airflow/providers/google/cloud/links/cloud_memorystore.py
rename to providers/src/airflow/providers/google/cloud/links/cloud_memorystore.py
diff --git a/airflow/providers/google/cloud/links/cloud_sql.py b/providers/src/airflow/providers/google/cloud/links/cloud_sql.py
similarity index 100%
rename from airflow/providers/google/cloud/links/cloud_sql.py
rename to providers/src/airflow/providers/google/cloud/links/cloud_sql.py
diff --git a/airflow/providers/google/cloud/links/cloud_storage_transfer.py b/providers/src/airflow/providers/google/cloud/links/cloud_storage_transfer.py
similarity index 100%
rename from airflow/providers/google/cloud/links/cloud_storage_transfer.py
rename to providers/src/airflow/providers/google/cloud/links/cloud_storage_transfer.py
diff --git a/airflow/providers/google/cloud/links/cloud_tasks.py b/providers/src/airflow/providers/google/cloud/links/cloud_tasks.py
similarity index 100%
rename from airflow/providers/google/cloud/links/cloud_tasks.py
rename to providers/src/airflow/providers/google/cloud/links/cloud_tasks.py
diff --git a/airflow/providers/google/cloud/links/compute.py b/providers/src/airflow/providers/google/cloud/links/compute.py
similarity index 100%
rename from airflow/providers/google/cloud/links/compute.py
rename to providers/src/airflow/providers/google/cloud/links/compute.py
diff --git a/airflow/providers/google/cloud/links/data_loss_prevention.py b/providers/src/airflow/providers/google/cloud/links/data_loss_prevention.py
similarity index 100%
rename from airflow/providers/google/cloud/links/data_loss_prevention.py
rename to providers/src/airflow/providers/google/cloud/links/data_loss_prevention.py
diff --git a/airflow/providers/google/cloud/links/datacatalog.py b/providers/src/airflow/providers/google/cloud/links/datacatalog.py
similarity index 100%
rename from airflow/providers/google/cloud/links/datacatalog.py
rename to providers/src/airflow/providers/google/cloud/links/datacatalog.py
diff --git a/airflow/providers/google/cloud/links/dataflow.py b/providers/src/airflow/providers/google/cloud/links/dataflow.py
similarity index 100%
rename from airflow/providers/google/cloud/links/dataflow.py
rename to providers/src/airflow/providers/google/cloud/links/dataflow.py
diff --git a/airflow/providers/google/cloud/links/dataform.py b/providers/src/airflow/providers/google/cloud/links/dataform.py
similarity index 100%
rename from airflow/providers/google/cloud/links/dataform.py
rename to providers/src/airflow/providers/google/cloud/links/dataform.py
diff --git a/airflow/providers/google/cloud/links/datafusion.py b/providers/src/airflow/providers/google/cloud/links/datafusion.py
similarity index 100%
rename from airflow/providers/google/cloud/links/datafusion.py
rename to providers/src/airflow/providers/google/cloud/links/datafusion.py
diff --git a/airflow/providers/google/cloud/links/dataplex.py b/providers/src/airflow/providers/google/cloud/links/dataplex.py
similarity index 100%
rename from airflow/providers/google/cloud/links/dataplex.py
rename to providers/src/airflow/providers/google/cloud/links/dataplex.py
diff --git a/airflow/providers/google/cloud/links/dataprep.py b/providers/src/airflow/providers/google/cloud/links/dataprep.py
similarity index 100%
rename from airflow/providers/google/cloud/links/dataprep.py
rename to providers/src/airflow/providers/google/cloud/links/dataprep.py
diff --git a/airflow/providers/google/cloud/links/dataproc.py b/providers/src/airflow/providers/google/cloud/links/dataproc.py
similarity index 100%
rename from airflow/providers/google/cloud/links/dataproc.py
rename to providers/src/airflow/providers/google/cloud/links/dataproc.py
diff --git a/airflow/providers/google/cloud/links/datastore.py b/providers/src/airflow/providers/google/cloud/links/datastore.py
similarity index 100%
rename from airflow/providers/google/cloud/links/datastore.py
rename to providers/src/airflow/providers/google/cloud/links/datastore.py
diff --git a/airflow/providers/google/cloud/links/kubernetes_engine.py b/providers/src/airflow/providers/google/cloud/links/kubernetes_engine.py
similarity index 100%
rename from airflow/providers/google/cloud/links/kubernetes_engine.py
rename to providers/src/airflow/providers/google/cloud/links/kubernetes_engine.py
diff --git a/airflow/providers/google/cloud/links/life_sciences.py b/providers/src/airflow/providers/google/cloud/links/life_sciences.py
similarity index 100%
rename from airflow/providers/google/cloud/links/life_sciences.py
rename to providers/src/airflow/providers/google/cloud/links/life_sciences.py
diff --git a/airflow/providers/google/cloud/links/mlengine.py b/providers/src/airflow/providers/google/cloud/links/mlengine.py
similarity index 100%
rename from airflow/providers/google/cloud/links/mlengine.py
rename to providers/src/airflow/providers/google/cloud/links/mlengine.py
diff --git a/airflow/providers/google/cloud/links/pubsub.py b/providers/src/airflow/providers/google/cloud/links/pubsub.py
similarity index 100%
rename from airflow/providers/google/cloud/links/pubsub.py
rename to providers/src/airflow/providers/google/cloud/links/pubsub.py
diff --git a/airflow/providers/google/cloud/links/spanner.py b/providers/src/airflow/providers/google/cloud/links/spanner.py
similarity index 100%
rename from airflow/providers/google/cloud/links/spanner.py
rename to providers/src/airflow/providers/google/cloud/links/spanner.py
diff --git a/airflow/providers/google/cloud/links/stackdriver.py b/providers/src/airflow/providers/google/cloud/links/stackdriver.py
similarity index 100%
rename from airflow/providers/google/cloud/links/stackdriver.py
rename to providers/src/airflow/providers/google/cloud/links/stackdriver.py
diff --git a/airflow/providers/google/cloud/links/translate.py b/providers/src/airflow/providers/google/cloud/links/translate.py
similarity index 100%
rename from airflow/providers/google/cloud/links/translate.py
rename to providers/src/airflow/providers/google/cloud/links/translate.py
diff --git a/airflow/providers/google/cloud/links/vertex_ai.py b/providers/src/airflow/providers/google/cloud/links/vertex_ai.py
similarity index 100%
rename from airflow/providers/google/cloud/links/vertex_ai.py
rename to providers/src/airflow/providers/google/cloud/links/vertex_ai.py
diff --git a/airflow/providers/google/cloud/links/workflows.py b/providers/src/airflow/providers/google/cloud/links/workflows.py
similarity index 100%
rename from airflow/providers/google/cloud/links/workflows.py
rename to providers/src/airflow/providers/google/cloud/links/workflows.py
diff --git a/airflow/providers/google/cloud/log/__init__.py b/providers/src/airflow/providers/google/cloud/log/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/log/__init__.py
rename to providers/src/airflow/providers/google/cloud/log/__init__.py
diff --git a/airflow/providers/google/cloud/log/gcs_task_handler.py b/providers/src/airflow/providers/google/cloud/log/gcs_task_handler.py
similarity index 100%
rename from airflow/providers/google/cloud/log/gcs_task_handler.py
rename to providers/src/airflow/providers/google/cloud/log/gcs_task_handler.py
diff --git a/airflow/providers/google/cloud/log/stackdriver_task_handler.py b/providers/src/airflow/providers/google/cloud/log/stackdriver_task_handler.py
similarity index 100%
rename from airflow/providers/google/cloud/log/stackdriver_task_handler.py
rename to providers/src/airflow/providers/google/cloud/log/stackdriver_task_handler.py
diff --git a/airflow/providers/google/cloud/openlineage/BigQueryErrorRunFacet.json b/providers/src/airflow/providers/google/cloud/openlineage/BigQueryErrorRunFacet.json
similarity index 100%
rename from airflow/providers/google/cloud/openlineage/BigQueryErrorRunFacet.json
rename to providers/src/airflow/providers/google/cloud/openlineage/BigQueryErrorRunFacet.json
diff --git a/airflow/providers/google/cloud/openlineage/BigQueryJobRunFacet.json b/providers/src/airflow/providers/google/cloud/openlineage/BigQueryJobRunFacet.json
similarity index 100%
rename from airflow/providers/google/cloud/openlineage/BigQueryJobRunFacet.json
rename to providers/src/airflow/providers/google/cloud/openlineage/BigQueryJobRunFacet.json
diff --git a/airflow/providers/google/cloud/openlineage/__init__.py b/providers/src/airflow/providers/google/cloud/openlineage/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/openlineage/__init__.py
rename to providers/src/airflow/providers/google/cloud/openlineage/__init__.py
diff --git a/airflow/providers/google/cloud/openlineage/mixins.py b/providers/src/airflow/providers/google/cloud/openlineage/mixins.py
similarity index 100%
rename from airflow/providers/google/cloud/openlineage/mixins.py
rename to providers/src/airflow/providers/google/cloud/openlineage/mixins.py
diff --git a/airflow/providers/google/cloud/openlineage/utils.py b/providers/src/airflow/providers/google/cloud/openlineage/utils.py
similarity index 100%
rename from airflow/providers/google/cloud/openlineage/utils.py
rename to providers/src/airflow/providers/google/cloud/openlineage/utils.py
diff --git a/airflow/providers/google/cloud/operators/__init__.py b/providers/src/airflow/providers/google/cloud/operators/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/__init__.py
rename to providers/src/airflow/providers/google/cloud/operators/__init__.py
diff --git a/airflow/providers/google/cloud/operators/automl.py b/providers/src/airflow/providers/google/cloud/operators/automl.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/automl.py
rename to providers/src/airflow/providers/google/cloud/operators/automl.py
similarity index 99% rename from airflow/providers/google/cloud/operators/bigquery.py rename to providers/src/airflow/providers/google/cloud/operators/bigquery.py index 1637f51d8f89e..876ff8d51f162 100644 --- a/airflow/providers/google/cloud/operators/bigquery.py +++ b/providers/src/airflow/providers/google/cloud/operators/bigquery.py @@ -35,7 +35,7 @@ from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException from airflow.models import BaseOperator, BaseOperatorLink from airflow.models.xcom import XCom -from airflow.providers.common.sql.operators.sql import ( +from airflow.providers.common.sql.operators.sql import ( # type: ignore[attr-defined] # for _parse_boolean SQLCheckOperator, SQLColumnCheckOperator, SQLIntervalCheckOperator, diff --git a/airflow/providers/google/cloud/operators/bigquery_dts.py b/providers/src/airflow/providers/google/cloud/operators/bigquery_dts.py similarity index 100% rename from airflow/providers/google/cloud/operators/bigquery_dts.py rename to providers/src/airflow/providers/google/cloud/operators/bigquery_dts.py diff --git a/airflow/providers/google/cloud/operators/bigtable.py b/providers/src/airflow/providers/google/cloud/operators/bigtable.py similarity index 100% rename from airflow/providers/google/cloud/operators/bigtable.py rename to providers/src/airflow/providers/google/cloud/operators/bigtable.py diff --git a/airflow/providers/google/cloud/operators/cloud_base.py b/providers/src/airflow/providers/google/cloud/operators/cloud_base.py similarity index 100% rename from airflow/providers/google/cloud/operators/cloud_base.py rename to providers/src/airflow/providers/google/cloud/operators/cloud_base.py diff --git a/airflow/providers/google/cloud/operators/cloud_batch.py b/providers/src/airflow/providers/google/cloud/operators/cloud_batch.py similarity index 100% rename from airflow/providers/google/cloud/operators/cloud_batch.py rename to providers/src/airflow/providers/google/cloud/operators/cloud_batch.py diff --git a/airflow/providers/google/cloud/operators/cloud_build.py b/providers/src/airflow/providers/google/cloud/operators/cloud_build.py similarity index 100% rename from airflow/providers/google/cloud/operators/cloud_build.py rename to providers/src/airflow/providers/google/cloud/operators/cloud_build.py diff --git a/airflow/providers/google/cloud/operators/cloud_composer.py b/providers/src/airflow/providers/google/cloud/operators/cloud_composer.py similarity index 100% rename from airflow/providers/google/cloud/operators/cloud_composer.py rename to providers/src/airflow/providers/google/cloud/operators/cloud_composer.py diff --git a/airflow/providers/google/cloud/operators/cloud_memorystore.py b/providers/src/airflow/providers/google/cloud/operators/cloud_memorystore.py similarity index 100% rename from airflow/providers/google/cloud/operators/cloud_memorystore.py rename to providers/src/airflow/providers/google/cloud/operators/cloud_memorystore.py diff --git a/airflow/providers/google/cloud/operators/cloud_run.py b/providers/src/airflow/providers/google/cloud/operators/cloud_run.py similarity index 100% rename from airflow/providers/google/cloud/operators/cloud_run.py rename to providers/src/airflow/providers/google/cloud/operators/cloud_run.py diff --git a/airflow/providers/google/cloud/operators/cloud_sql.py b/providers/src/airflow/providers/google/cloud/operators/cloud_sql.py similarity index 100% rename from airflow/providers/google/cloud/operators/cloud_sql.py rename to 
providers/src/airflow/providers/google/cloud/operators/cloud_sql.py diff --git a/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py b/providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py similarity index 100% rename from airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py rename to providers/src/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py diff --git a/airflow/providers/google/cloud/operators/compute.py b/providers/src/airflow/providers/google/cloud/operators/compute.py similarity index 100% rename from airflow/providers/google/cloud/operators/compute.py rename to providers/src/airflow/providers/google/cloud/operators/compute.py diff --git a/airflow/providers/google/cloud/operators/datacatalog.py b/providers/src/airflow/providers/google/cloud/operators/datacatalog.py similarity index 100% rename from airflow/providers/google/cloud/operators/datacatalog.py rename to providers/src/airflow/providers/google/cloud/operators/datacatalog.py diff --git a/airflow/providers/google/cloud/operators/dataflow.py b/providers/src/airflow/providers/google/cloud/operators/dataflow.py similarity index 100% rename from airflow/providers/google/cloud/operators/dataflow.py rename to providers/src/airflow/providers/google/cloud/operators/dataflow.py diff --git a/airflow/providers/google/cloud/operators/dataform.py b/providers/src/airflow/providers/google/cloud/operators/dataform.py similarity index 100% rename from airflow/providers/google/cloud/operators/dataform.py rename to providers/src/airflow/providers/google/cloud/operators/dataform.py diff --git a/airflow/providers/google/cloud/operators/datafusion.py b/providers/src/airflow/providers/google/cloud/operators/datafusion.py similarity index 100% rename from airflow/providers/google/cloud/operators/datafusion.py rename to providers/src/airflow/providers/google/cloud/operators/datafusion.py diff --git a/airflow/providers/google/cloud/operators/datapipeline.py b/providers/src/airflow/providers/google/cloud/operators/datapipeline.py similarity index 100% rename from airflow/providers/google/cloud/operators/datapipeline.py rename to providers/src/airflow/providers/google/cloud/operators/datapipeline.py diff --git a/airflow/providers/google/cloud/operators/dataplex.py b/providers/src/airflow/providers/google/cloud/operators/dataplex.py similarity index 100% rename from airflow/providers/google/cloud/operators/dataplex.py rename to providers/src/airflow/providers/google/cloud/operators/dataplex.py diff --git a/airflow/providers/google/cloud/operators/dataprep.py b/providers/src/airflow/providers/google/cloud/operators/dataprep.py similarity index 100% rename from airflow/providers/google/cloud/operators/dataprep.py rename to providers/src/airflow/providers/google/cloud/operators/dataprep.py diff --git a/airflow/providers/google/cloud/operators/dataproc.py b/providers/src/airflow/providers/google/cloud/operators/dataproc.py similarity index 100% rename from airflow/providers/google/cloud/operators/dataproc.py rename to providers/src/airflow/providers/google/cloud/operators/dataproc.py diff --git a/airflow/providers/google/cloud/operators/dataproc_metastore.py b/providers/src/airflow/providers/google/cloud/operators/dataproc_metastore.py similarity index 100% rename from airflow/providers/google/cloud/operators/dataproc_metastore.py rename to providers/src/airflow/providers/google/cloud/operators/dataproc_metastore.py diff --git 
a/airflow/providers/google/cloud/operators/datastore.py b/providers/src/airflow/providers/google/cloud/operators/datastore.py similarity index 100% rename from airflow/providers/google/cloud/operators/datastore.py rename to providers/src/airflow/providers/google/cloud/operators/datastore.py diff --git a/airflow/providers/google/cloud/operators/dlp.py b/providers/src/airflow/providers/google/cloud/operators/dlp.py similarity index 100% rename from airflow/providers/google/cloud/operators/dlp.py rename to providers/src/airflow/providers/google/cloud/operators/dlp.py diff --git a/airflow/providers/google/cloud/operators/functions.py b/providers/src/airflow/providers/google/cloud/operators/functions.py similarity index 100% rename from airflow/providers/google/cloud/operators/functions.py rename to providers/src/airflow/providers/google/cloud/operators/functions.py diff --git a/airflow/providers/google/cloud/operators/gcs.py b/providers/src/airflow/providers/google/cloud/operators/gcs.py similarity index 100% rename from airflow/providers/google/cloud/operators/gcs.py rename to providers/src/airflow/providers/google/cloud/operators/gcs.py diff --git a/airflow/providers/google/cloud/operators/kubernetes_engine.py b/providers/src/airflow/providers/google/cloud/operators/kubernetes_engine.py similarity index 100% rename from airflow/providers/google/cloud/operators/kubernetes_engine.py rename to providers/src/airflow/providers/google/cloud/operators/kubernetes_engine.py diff --git a/airflow/providers/google/cloud/operators/life_sciences.py b/providers/src/airflow/providers/google/cloud/operators/life_sciences.py similarity index 100% rename from airflow/providers/google/cloud/operators/life_sciences.py rename to providers/src/airflow/providers/google/cloud/operators/life_sciences.py diff --git a/airflow/providers/google/cloud/operators/looker.py b/providers/src/airflow/providers/google/cloud/operators/looker.py similarity index 100% rename from airflow/providers/google/cloud/operators/looker.py rename to providers/src/airflow/providers/google/cloud/operators/looker.py diff --git a/airflow/providers/google/cloud/operators/mlengine.py b/providers/src/airflow/providers/google/cloud/operators/mlengine.py similarity index 100% rename from airflow/providers/google/cloud/operators/mlengine.py rename to providers/src/airflow/providers/google/cloud/operators/mlengine.py diff --git a/airflow/providers/google/cloud/operators/natural_language.py b/providers/src/airflow/providers/google/cloud/operators/natural_language.py similarity index 100% rename from airflow/providers/google/cloud/operators/natural_language.py rename to providers/src/airflow/providers/google/cloud/operators/natural_language.py diff --git a/airflow/providers/google/cloud/operators/pubsub.py b/providers/src/airflow/providers/google/cloud/operators/pubsub.py similarity index 100% rename from airflow/providers/google/cloud/operators/pubsub.py rename to providers/src/airflow/providers/google/cloud/operators/pubsub.py diff --git a/airflow/providers/google/cloud/operators/spanner.py b/providers/src/airflow/providers/google/cloud/operators/spanner.py similarity index 100% rename from airflow/providers/google/cloud/operators/spanner.py rename to providers/src/airflow/providers/google/cloud/operators/spanner.py diff --git a/airflow/providers/google/cloud/operators/speech_to_text.py b/providers/src/airflow/providers/google/cloud/operators/speech_to_text.py similarity index 100% rename from airflow/providers/google/cloud/operators/speech_to_text.py 
rename to providers/src/airflow/providers/google/cloud/operators/speech_to_text.py diff --git a/airflow/providers/google/cloud/operators/stackdriver.py b/providers/src/airflow/providers/google/cloud/operators/stackdriver.py similarity index 100% rename from airflow/providers/google/cloud/operators/stackdriver.py rename to providers/src/airflow/providers/google/cloud/operators/stackdriver.py diff --git a/airflow/providers/google/cloud/operators/tasks.py b/providers/src/airflow/providers/google/cloud/operators/tasks.py similarity index 100% rename from airflow/providers/google/cloud/operators/tasks.py rename to providers/src/airflow/providers/google/cloud/operators/tasks.py diff --git a/airflow/providers/google/cloud/operators/text_to_speech.py b/providers/src/airflow/providers/google/cloud/operators/text_to_speech.py similarity index 100% rename from airflow/providers/google/cloud/operators/text_to_speech.py rename to providers/src/airflow/providers/google/cloud/operators/text_to_speech.py diff --git a/airflow/providers/google/cloud/operators/translate.py b/providers/src/airflow/providers/google/cloud/operators/translate.py similarity index 100% rename from airflow/providers/google/cloud/operators/translate.py rename to providers/src/airflow/providers/google/cloud/operators/translate.py diff --git a/airflow/providers/google/cloud/operators/translate_speech.py b/providers/src/airflow/providers/google/cloud/operators/translate_speech.py similarity index 100% rename from airflow/providers/google/cloud/operators/translate_speech.py rename to providers/src/airflow/providers/google/cloud/operators/translate_speech.py diff --git a/airflow/providers/google/cloud/operators/vertex_ai/__init__.py b/providers/src/airflow/providers/google/cloud/operators/vertex_ai/__init__.py similarity index 100% rename from airflow/providers/google/cloud/operators/vertex_ai/__init__.py rename to providers/src/airflow/providers/google/cloud/operators/vertex_ai/__init__.py diff --git a/airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py b/providers/src/airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py similarity index 100% rename from airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py rename to providers/src/airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py diff --git a/airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py b/providers/src/airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py similarity index 100% rename from airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py rename to providers/src/airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py diff --git a/airflow/providers/google/cloud/operators/vertex_ai/custom_job.py b/providers/src/airflow/providers/google/cloud/operators/vertex_ai/custom_job.py similarity index 100% rename from airflow/providers/google/cloud/operators/vertex_ai/custom_job.py rename to providers/src/airflow/providers/google/cloud/operators/vertex_ai/custom_job.py diff --git a/airflow/providers/google/cloud/operators/vertex_ai/dataset.py b/providers/src/airflow/providers/google/cloud/operators/vertex_ai/dataset.py similarity index 100% rename from airflow/providers/google/cloud/operators/vertex_ai/dataset.py rename to providers/src/airflow/providers/google/cloud/operators/vertex_ai/dataset.py diff --git a/airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py b/providers/src/airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py 
similarity index 100% rename from airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py rename to providers/src/airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py diff --git a/airflow/providers/google/cloud/operators/vertex_ai/generative_model.py b/providers/src/airflow/providers/google/cloud/operators/vertex_ai/generative_model.py similarity index 100% rename from airflow/providers/google/cloud/operators/vertex_ai/generative_model.py rename to providers/src/airflow/providers/google/cloud/operators/vertex_ai/generative_model.py diff --git a/airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py b/providers/src/airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py similarity index 100% rename from airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py rename to providers/src/airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py diff --git a/airflow/providers/google/cloud/operators/vertex_ai/model_service.py b/providers/src/airflow/providers/google/cloud/operators/vertex_ai/model_service.py similarity index 100% rename from airflow/providers/google/cloud/operators/vertex_ai/model_service.py rename to providers/src/airflow/providers/google/cloud/operators/vertex_ai/model_service.py diff --git a/airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py b/providers/src/airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py similarity index 100% rename from airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py rename to providers/src/airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py diff --git a/airflow/providers/google/cloud/operators/video_intelligence.py b/providers/src/airflow/providers/google/cloud/operators/video_intelligence.py similarity index 100% rename from airflow/providers/google/cloud/operators/video_intelligence.py rename to providers/src/airflow/providers/google/cloud/operators/video_intelligence.py diff --git a/airflow/providers/google/cloud/operators/vision.py b/providers/src/airflow/providers/google/cloud/operators/vision.py similarity index 100% rename from airflow/providers/google/cloud/operators/vision.py rename to providers/src/airflow/providers/google/cloud/operators/vision.py diff --git a/airflow/providers/google/cloud/operators/workflows.py b/providers/src/airflow/providers/google/cloud/operators/workflows.py similarity index 100% rename from airflow/providers/google/cloud/operators/workflows.py rename to providers/src/airflow/providers/google/cloud/operators/workflows.py diff --git a/airflow/providers/google/cloud/secrets/__init__.py b/providers/src/airflow/providers/google/cloud/secrets/__init__.py similarity index 100% rename from airflow/providers/google/cloud/secrets/__init__.py rename to providers/src/airflow/providers/google/cloud/secrets/__init__.py diff --git a/airflow/providers/google/cloud/secrets/secret_manager.py b/providers/src/airflow/providers/google/cloud/secrets/secret_manager.py similarity index 100% rename from airflow/providers/google/cloud/secrets/secret_manager.py rename to providers/src/airflow/providers/google/cloud/secrets/secret_manager.py diff --git a/airflow/providers/google/cloud/sensors/__init__.py b/providers/src/airflow/providers/google/cloud/sensors/__init__.py similarity index 100% rename from airflow/providers/google/cloud/sensors/__init__.py rename to providers/src/airflow/providers/google/cloud/sensors/__init__.py diff --git 
a/airflow/providers/google/cloud/sensors/bigquery.py b/providers/src/airflow/providers/google/cloud/sensors/bigquery.py similarity index 100% rename from airflow/providers/google/cloud/sensors/bigquery.py rename to providers/src/airflow/providers/google/cloud/sensors/bigquery.py diff --git a/airflow/providers/google/cloud/sensors/bigquery_dts.py b/providers/src/airflow/providers/google/cloud/sensors/bigquery_dts.py similarity index 100% rename from airflow/providers/google/cloud/sensors/bigquery_dts.py rename to providers/src/airflow/providers/google/cloud/sensors/bigquery_dts.py diff --git a/airflow/providers/google/cloud/sensors/bigtable.py b/providers/src/airflow/providers/google/cloud/sensors/bigtable.py similarity index 100% rename from airflow/providers/google/cloud/sensors/bigtable.py rename to providers/src/airflow/providers/google/cloud/sensors/bigtable.py diff --git a/airflow/providers/google/cloud/sensors/cloud_composer.py b/providers/src/airflow/providers/google/cloud/sensors/cloud_composer.py similarity index 100% rename from airflow/providers/google/cloud/sensors/cloud_composer.py rename to providers/src/airflow/providers/google/cloud/sensors/cloud_composer.py diff --git a/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py b/providers/src/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py similarity index 100% rename from airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py rename to providers/src/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py diff --git a/airflow/providers/google/cloud/sensors/dataflow.py b/providers/src/airflow/providers/google/cloud/sensors/dataflow.py similarity index 100% rename from airflow/providers/google/cloud/sensors/dataflow.py rename to providers/src/airflow/providers/google/cloud/sensors/dataflow.py diff --git a/airflow/providers/google/cloud/sensors/dataform.py b/providers/src/airflow/providers/google/cloud/sensors/dataform.py similarity index 100% rename from airflow/providers/google/cloud/sensors/dataform.py rename to providers/src/airflow/providers/google/cloud/sensors/dataform.py diff --git a/airflow/providers/google/cloud/sensors/datafusion.py b/providers/src/airflow/providers/google/cloud/sensors/datafusion.py similarity index 100% rename from airflow/providers/google/cloud/sensors/datafusion.py rename to providers/src/airflow/providers/google/cloud/sensors/datafusion.py diff --git a/airflow/providers/google/cloud/sensors/dataplex.py b/providers/src/airflow/providers/google/cloud/sensors/dataplex.py similarity index 100% rename from airflow/providers/google/cloud/sensors/dataplex.py rename to providers/src/airflow/providers/google/cloud/sensors/dataplex.py diff --git a/airflow/providers/google/cloud/sensors/dataprep.py b/providers/src/airflow/providers/google/cloud/sensors/dataprep.py similarity index 100% rename from airflow/providers/google/cloud/sensors/dataprep.py rename to providers/src/airflow/providers/google/cloud/sensors/dataprep.py diff --git a/airflow/providers/google/cloud/sensors/dataproc.py b/providers/src/airflow/providers/google/cloud/sensors/dataproc.py similarity index 100% rename from airflow/providers/google/cloud/sensors/dataproc.py rename to providers/src/airflow/providers/google/cloud/sensors/dataproc.py diff --git a/airflow/providers/google/cloud/sensors/dataproc_metastore.py b/providers/src/airflow/providers/google/cloud/sensors/dataproc_metastore.py similarity index 100% rename from 
airflow/providers/google/cloud/sensors/dataproc_metastore.py rename to providers/src/airflow/providers/google/cloud/sensors/dataproc_metastore.py diff --git a/airflow/providers/google/cloud/sensors/gcs.py b/providers/src/airflow/providers/google/cloud/sensors/gcs.py similarity index 100% rename from airflow/providers/google/cloud/sensors/gcs.py rename to providers/src/airflow/providers/google/cloud/sensors/gcs.py diff --git a/airflow/providers/google/cloud/sensors/looker.py b/providers/src/airflow/providers/google/cloud/sensors/looker.py similarity index 100% rename from airflow/providers/google/cloud/sensors/looker.py rename to providers/src/airflow/providers/google/cloud/sensors/looker.py diff --git a/airflow/providers/google/cloud/sensors/pubsub.py b/providers/src/airflow/providers/google/cloud/sensors/pubsub.py similarity index 100% rename from airflow/providers/google/cloud/sensors/pubsub.py rename to providers/src/airflow/providers/google/cloud/sensors/pubsub.py diff --git a/airflow/providers/google/cloud/sensors/tasks.py b/providers/src/airflow/providers/google/cloud/sensors/tasks.py similarity index 100% rename from airflow/providers/google/cloud/sensors/tasks.py rename to providers/src/airflow/providers/google/cloud/sensors/tasks.py diff --git a/airflow/providers/google/cloud/sensors/workflows.py b/providers/src/airflow/providers/google/cloud/sensors/workflows.py similarity index 100% rename from airflow/providers/google/cloud/sensors/workflows.py rename to providers/src/airflow/providers/google/cloud/sensors/workflows.py diff --git a/airflow/providers/google/cloud/transfers/__init__.py b/providers/src/airflow/providers/google/cloud/transfers/__init__.py similarity index 100% rename from airflow/providers/google/cloud/transfers/__init__.py rename to providers/src/airflow/providers/google/cloud/transfers/__init__.py diff --git a/airflow/providers/google/cloud/transfers/adls_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/adls_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/adls_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/adls_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py b/providers/src/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py similarity index 100% rename from airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py rename to providers/src/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/bigquery_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py diff --git 
a/airflow/providers/google/cloud/transfers/bigquery_to_mssql.py b/providers/src/airflow/providers/google/cloud/transfers/bigquery_to_mssql.py similarity index 100% rename from airflow/providers/google/cloud/transfers/bigquery_to_mssql.py rename to providers/src/airflow/providers/google/cloud/transfers/bigquery_to_mssql.py diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py b/providers/src/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py similarity index 100% rename from airflow/providers/google/cloud/transfers/bigquery_to_mysql.py rename to providers/src/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_postgres.py b/providers/src/airflow/providers/google/cloud/transfers/bigquery_to_postgres.py similarity index 100% rename from airflow/providers/google/cloud/transfers/bigquery_to_postgres.py rename to providers/src/airflow/providers/google/cloud/transfers/bigquery_to_postgres.py diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_sql.py b/providers/src/airflow/providers/google/cloud/transfers/bigquery_to_sql.py similarity index 100% rename from airflow/providers/google/cloud/transfers/bigquery_to_sql.py rename to providers/src/airflow/providers/google/cloud/transfers/bigquery_to_sql.py diff --git a/airflow/providers/google/cloud/transfers/calendar_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/calendar_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/calendar_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/calendar_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/cassandra_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py b/providers/src/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py similarity index 100% rename from airflow/providers/google/cloud/transfers/gcs_to_bigquery.py rename to providers/src/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py diff --git a/airflow/providers/google/cloud/transfers/gcs_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/gcs_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/gcs_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/gcs_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/gcs_to_local.py b/providers/src/airflow/providers/google/cloud/transfers/gcs_to_local.py similarity index 100% rename from airflow/providers/google/cloud/transfers/gcs_to_local.py rename to providers/src/airflow/providers/google/cloud/transfers/gcs_to_local.py diff --git a/airflow/providers/google/cloud/transfers/gcs_to_sftp.py b/providers/src/airflow/providers/google/cloud/transfers/gcs_to_sftp.py similarity index 100% rename from airflow/providers/google/cloud/transfers/gcs_to_sftp.py rename to 
providers/src/airflow/providers/google/cloud/transfers/gcs_to_sftp.py diff --git a/airflow/providers/google/cloud/transfers/gdrive_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/gdrive_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/gdrive_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/gdrive_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/gdrive_to_local.py b/providers/src/airflow/providers/google/cloud/transfers/gdrive_to_local.py similarity index 100% rename from airflow/providers/google/cloud/transfers/gdrive_to_local.py rename to providers/src/airflow/providers/google/cloud/transfers/gdrive_to_local.py diff --git a/airflow/providers/google/cloud/transfers/local_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/local_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/local_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/local_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/mssql_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/mssql_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/mssql_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/mssql_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/mysql_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/mysql_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/mysql_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/mysql_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/oracle_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/oracle_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/oracle_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/oracle_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/postgres_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/postgres_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/postgres_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/postgres_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/presto_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/presto_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/presto_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/presto_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/s3_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/s3_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/s3_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/s3_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/salesforce_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/salesforce_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/salesforce_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/salesforce_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/sftp_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/sftp_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/sftp_to_gcs.py rename to 
providers/src/airflow/providers/google/cloud/transfers/sftp_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/sheets_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/sheets_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/sheets_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/sheets_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/sql_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/sql_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/sql_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/sql_to_gcs.py diff --git a/airflow/providers/google/cloud/transfers/trino_to_gcs.py b/providers/src/airflow/providers/google/cloud/transfers/trino_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/transfers/trino_to_gcs.py rename to providers/src/airflow/providers/google/cloud/transfers/trino_to_gcs.py diff --git a/airflow/providers/google/cloud/triggers/__init__.py b/providers/src/airflow/providers/google/cloud/triggers/__init__.py similarity index 100% rename from airflow/providers/google/cloud/triggers/__init__.py rename to providers/src/airflow/providers/google/cloud/triggers/__init__.py diff --git a/airflow/providers/google/cloud/triggers/bigquery.py b/providers/src/airflow/providers/google/cloud/triggers/bigquery.py similarity index 100% rename from airflow/providers/google/cloud/triggers/bigquery.py rename to providers/src/airflow/providers/google/cloud/triggers/bigquery.py diff --git a/airflow/providers/google/cloud/triggers/bigquery_dts.py b/providers/src/airflow/providers/google/cloud/triggers/bigquery_dts.py similarity index 100% rename from airflow/providers/google/cloud/triggers/bigquery_dts.py rename to providers/src/airflow/providers/google/cloud/triggers/bigquery_dts.py diff --git a/airflow/providers/google/cloud/triggers/cloud_batch.py b/providers/src/airflow/providers/google/cloud/triggers/cloud_batch.py similarity index 100% rename from airflow/providers/google/cloud/triggers/cloud_batch.py rename to providers/src/airflow/providers/google/cloud/triggers/cloud_batch.py diff --git a/airflow/providers/google/cloud/triggers/cloud_build.py b/providers/src/airflow/providers/google/cloud/triggers/cloud_build.py similarity index 100% rename from airflow/providers/google/cloud/triggers/cloud_build.py rename to providers/src/airflow/providers/google/cloud/triggers/cloud_build.py diff --git a/airflow/providers/google/cloud/triggers/cloud_composer.py b/providers/src/airflow/providers/google/cloud/triggers/cloud_composer.py similarity index 100% rename from airflow/providers/google/cloud/triggers/cloud_composer.py rename to providers/src/airflow/providers/google/cloud/triggers/cloud_composer.py diff --git a/airflow/providers/google/cloud/triggers/cloud_run.py b/providers/src/airflow/providers/google/cloud/triggers/cloud_run.py similarity index 100% rename from airflow/providers/google/cloud/triggers/cloud_run.py rename to providers/src/airflow/providers/google/cloud/triggers/cloud_run.py diff --git a/airflow/providers/google/cloud/triggers/cloud_sql.py b/providers/src/airflow/providers/google/cloud/triggers/cloud_sql.py similarity index 100% rename from airflow/providers/google/cloud/triggers/cloud_sql.py rename to providers/src/airflow/providers/google/cloud/triggers/cloud_sql.py diff --git a/airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py 
b/providers/src/airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py similarity index 100% rename from airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py rename to providers/src/airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py diff --git a/airflow/providers/google/cloud/triggers/dataflow.py b/providers/src/airflow/providers/google/cloud/triggers/dataflow.py similarity index 100% rename from airflow/providers/google/cloud/triggers/dataflow.py rename to providers/src/airflow/providers/google/cloud/triggers/dataflow.py diff --git a/airflow/providers/google/cloud/triggers/datafusion.py b/providers/src/airflow/providers/google/cloud/triggers/datafusion.py similarity index 100% rename from airflow/providers/google/cloud/triggers/datafusion.py rename to providers/src/airflow/providers/google/cloud/triggers/datafusion.py diff --git a/airflow/providers/google/cloud/triggers/dataplex.py b/providers/src/airflow/providers/google/cloud/triggers/dataplex.py similarity index 100% rename from airflow/providers/google/cloud/triggers/dataplex.py rename to providers/src/airflow/providers/google/cloud/triggers/dataplex.py diff --git a/airflow/providers/google/cloud/triggers/dataproc.py b/providers/src/airflow/providers/google/cloud/triggers/dataproc.py similarity index 100% rename from airflow/providers/google/cloud/triggers/dataproc.py rename to providers/src/airflow/providers/google/cloud/triggers/dataproc.py diff --git a/airflow/providers/google/cloud/triggers/gcs.py b/providers/src/airflow/providers/google/cloud/triggers/gcs.py similarity index 100% rename from airflow/providers/google/cloud/triggers/gcs.py rename to providers/src/airflow/providers/google/cloud/triggers/gcs.py diff --git a/airflow/providers/google/cloud/triggers/kubernetes_engine.py b/providers/src/airflow/providers/google/cloud/triggers/kubernetes_engine.py similarity index 100% rename from airflow/providers/google/cloud/triggers/kubernetes_engine.py rename to providers/src/airflow/providers/google/cloud/triggers/kubernetes_engine.py diff --git a/airflow/providers/google/cloud/triggers/mlengine.py b/providers/src/airflow/providers/google/cloud/triggers/mlengine.py similarity index 100% rename from airflow/providers/google/cloud/triggers/mlengine.py rename to providers/src/airflow/providers/google/cloud/triggers/mlengine.py diff --git a/airflow/providers/google/cloud/triggers/pubsub.py b/providers/src/airflow/providers/google/cloud/triggers/pubsub.py similarity index 100% rename from airflow/providers/google/cloud/triggers/pubsub.py rename to providers/src/airflow/providers/google/cloud/triggers/pubsub.py diff --git a/airflow/providers/google/cloud/triggers/vertex_ai.py b/providers/src/airflow/providers/google/cloud/triggers/vertex_ai.py similarity index 100% rename from airflow/providers/google/cloud/triggers/vertex_ai.py rename to providers/src/airflow/providers/google/cloud/triggers/vertex_ai.py diff --git a/airflow/providers/ftp/sensors/__init__.py b/providers/src/airflow/providers/google/cloud/utils/__init__.py similarity index 100% rename from airflow/providers/ftp/sensors/__init__.py rename to providers/src/airflow/providers/google/cloud/utils/__init__.py diff --git a/airflow/providers/google/cloud/utils/bigquery.py b/providers/src/airflow/providers/google/cloud/utils/bigquery.py similarity index 100% rename from airflow/providers/google/cloud/utils/bigquery.py rename to providers/src/airflow/providers/google/cloud/utils/bigquery.py diff --git 
a/airflow/providers/google/cloud/utils/bigquery_get_data.py b/providers/src/airflow/providers/google/cloud/utils/bigquery_get_data.py similarity index 100% rename from airflow/providers/google/cloud/utils/bigquery_get_data.py rename to providers/src/airflow/providers/google/cloud/utils/bigquery_get_data.py diff --git a/airflow/providers/google/cloud/utils/credentials_provider.py b/providers/src/airflow/providers/google/cloud/utils/credentials_provider.py similarity index 100% rename from airflow/providers/google/cloud/utils/credentials_provider.py rename to providers/src/airflow/providers/google/cloud/utils/credentials_provider.py diff --git a/airflow/providers/google/cloud/utils/dataform.py b/providers/src/airflow/providers/google/cloud/utils/dataform.py similarity index 100% rename from airflow/providers/google/cloud/utils/dataform.py rename to providers/src/airflow/providers/google/cloud/utils/dataform.py diff --git a/airflow/providers/google/cloud/utils/datafusion.py b/providers/src/airflow/providers/google/cloud/utils/datafusion.py similarity index 100% rename from airflow/providers/google/cloud/utils/datafusion.py rename to providers/src/airflow/providers/google/cloud/utils/datafusion.py diff --git a/airflow/providers/google/cloud/utils/dataproc.py b/providers/src/airflow/providers/google/cloud/utils/dataproc.py similarity index 100% rename from airflow/providers/google/cloud/utils/dataproc.py rename to providers/src/airflow/providers/google/cloud/utils/dataproc.py diff --git a/airflow/providers/google/cloud/utils/external_token_supplier.py b/providers/src/airflow/providers/google/cloud/utils/external_token_supplier.py similarity index 100% rename from airflow/providers/google/cloud/utils/external_token_supplier.py rename to providers/src/airflow/providers/google/cloud/utils/external_token_supplier.py diff --git a/airflow/providers/google/cloud/utils/field_sanitizer.py b/providers/src/airflow/providers/google/cloud/utils/field_sanitizer.py similarity index 100% rename from airflow/providers/google/cloud/utils/field_sanitizer.py rename to providers/src/airflow/providers/google/cloud/utils/field_sanitizer.py diff --git a/airflow/providers/google/cloud/utils/field_validator.py b/providers/src/airflow/providers/google/cloud/utils/field_validator.py similarity index 100% rename from airflow/providers/google/cloud/utils/field_validator.py rename to providers/src/airflow/providers/google/cloud/utils/field_validator.py diff --git a/airflow/providers/google/cloud/utils/helpers.py b/providers/src/airflow/providers/google/cloud/utils/helpers.py similarity index 100% rename from airflow/providers/google/cloud/utils/helpers.py rename to providers/src/airflow/providers/google/cloud/utils/helpers.py diff --git a/airflow/providers/google/cloud/utils/mlengine_operator_utils.py b/providers/src/airflow/providers/google/cloud/utils/mlengine_operator_utils.py similarity index 100% rename from airflow/providers/google/cloud/utils/mlengine_operator_utils.py rename to providers/src/airflow/providers/google/cloud/utils/mlengine_operator_utils.py diff --git a/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py b/providers/src/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py similarity index 100% rename from airflow/providers/google/cloud/utils/mlengine_prediction_summary.py rename to providers/src/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py diff --git a/airflow/providers/google/common/__init__.py 
b/providers/src/airflow/providers/google/common/__init__.py similarity index 100% rename from airflow/providers/google/common/__init__.py rename to providers/src/airflow/providers/google/common/__init__.py diff --git a/airflow/providers/google/common/auth_backend/__init__.py b/providers/src/airflow/providers/google/common/auth_backend/__init__.py similarity index 100% rename from airflow/providers/google/common/auth_backend/__init__.py rename to providers/src/airflow/providers/google/common/auth_backend/__init__.py diff --git a/airflow/providers/google/common/auth_backend/google_openid.py b/providers/src/airflow/providers/google/common/auth_backend/google_openid.py similarity index 100% rename from airflow/providers/google/common/auth_backend/google_openid.py rename to providers/src/airflow/providers/google/common/auth_backend/google_openid.py diff --git a/airflow/providers/google/common/consts.py b/providers/src/airflow/providers/google/common/consts.py similarity index 100% rename from airflow/providers/google/common/consts.py rename to providers/src/airflow/providers/google/common/consts.py diff --git a/airflow/providers/google/common/deprecated.py b/providers/src/airflow/providers/google/common/deprecated.py similarity index 100% rename from airflow/providers/google/common/deprecated.py rename to providers/src/airflow/providers/google/common/deprecated.py diff --git a/airflow/providers/google/common/hooks/__init__.py b/providers/src/airflow/providers/google/common/hooks/__init__.py similarity index 100% rename from airflow/providers/google/common/hooks/__init__.py rename to providers/src/airflow/providers/google/common/hooks/__init__.py diff --git a/airflow/providers/google/common/hooks/base_google.py b/providers/src/airflow/providers/google/common/hooks/base_google.py similarity index 100% rename from airflow/providers/google/common/hooks/base_google.py rename to providers/src/airflow/providers/google/common/hooks/base_google.py diff --git a/airflow/providers/google/common/hooks/discovery_api.py b/providers/src/airflow/providers/google/common/hooks/discovery_api.py similarity index 100% rename from airflow/providers/google/common/hooks/discovery_api.py rename to providers/src/airflow/providers/google/common/hooks/discovery_api.py diff --git a/airflow/providers/google/common/links/__init__.py b/providers/src/airflow/providers/google/common/links/__init__.py similarity index 100% rename from airflow/providers/google/common/links/__init__.py rename to providers/src/airflow/providers/google/common/links/__init__.py diff --git a/airflow/providers/google/common/links/storage.py b/providers/src/airflow/providers/google/common/links/storage.py similarity index 100% rename from airflow/providers/google/common/links/storage.py rename to providers/src/airflow/providers/google/common/links/storage.py diff --git a/airflow/providers/google/common/utils/__init__.py b/providers/src/airflow/providers/google/common/utils/__init__.py similarity index 100% rename from airflow/providers/google/common/utils/__init__.py rename to providers/src/airflow/providers/google/common/utils/__init__.py diff --git a/airflow/providers/google/common/utils/id_token_credentials.py b/providers/src/airflow/providers/google/common/utils/id_token_credentials.py similarity index 100% rename from airflow/providers/google/common/utils/id_token_credentials.py rename to providers/src/airflow/providers/google/common/utils/id_token_credentials.py diff --git a/airflow/providers/google/datasets/__init__.py 
b/providers/src/airflow/providers/google/datasets/__init__.py similarity index 100% rename from airflow/providers/google/datasets/__init__.py rename to providers/src/airflow/providers/google/datasets/__init__.py diff --git a/airflow/providers/google/datasets/bigquery.py b/providers/src/airflow/providers/google/datasets/bigquery.py similarity index 100% rename from airflow/providers/google/datasets/bigquery.py rename to providers/src/airflow/providers/google/datasets/bigquery.py diff --git a/airflow/providers/google/firebase/__init__.py b/providers/src/airflow/providers/google/firebase/__init__.py similarity index 100% rename from airflow/providers/google/firebase/__init__.py rename to providers/src/airflow/providers/google/firebase/__init__.py diff --git a/airflow/providers/google/firebase/hooks/__init__.py b/providers/src/airflow/providers/google/firebase/hooks/__init__.py similarity index 100% rename from airflow/providers/google/firebase/hooks/__init__.py rename to providers/src/airflow/providers/google/firebase/hooks/__init__.py diff --git a/airflow/providers/google/firebase/hooks/firestore.py b/providers/src/airflow/providers/google/firebase/hooks/firestore.py similarity index 100% rename from airflow/providers/google/firebase/hooks/firestore.py rename to providers/src/airflow/providers/google/firebase/hooks/firestore.py diff --git a/airflow/providers/google/firebase/operators/__init__.py b/providers/src/airflow/providers/google/firebase/operators/__init__.py similarity index 100% rename from airflow/providers/google/firebase/operators/__init__.py rename to providers/src/airflow/providers/google/firebase/operators/__init__.py diff --git a/airflow/providers/google/firebase/operators/firestore.py b/providers/src/airflow/providers/google/firebase/operators/firestore.py similarity index 100% rename from airflow/providers/google/firebase/operators/firestore.py rename to providers/src/airflow/providers/google/firebase/operators/firestore.py diff --git a/airflow/providers/google/go_module_utils.py b/providers/src/airflow/providers/google/go_module_utils.py similarity index 100% rename from airflow/providers/google/go_module_utils.py rename to providers/src/airflow/providers/google/go_module_utils.py diff --git a/airflow/providers/google/leveldb/__init__.py b/providers/src/airflow/providers/google/leveldb/__init__.py similarity index 100% rename from airflow/providers/google/leveldb/__init__.py rename to providers/src/airflow/providers/google/leveldb/__init__.py diff --git a/airflow/providers/google/leveldb/hooks/__init__.py b/providers/src/airflow/providers/google/leveldb/hooks/__init__.py similarity index 100% rename from airflow/providers/google/leveldb/hooks/__init__.py rename to providers/src/airflow/providers/google/leveldb/hooks/__init__.py diff --git a/airflow/providers/google/leveldb/hooks/leveldb.py b/providers/src/airflow/providers/google/leveldb/hooks/leveldb.py similarity index 100% rename from airflow/providers/google/leveldb/hooks/leveldb.py rename to providers/src/airflow/providers/google/leveldb/hooks/leveldb.py diff --git a/airflow/providers/google/leveldb/operators/__init__.py b/providers/src/airflow/providers/google/leveldb/operators/__init__.py similarity index 100% rename from airflow/providers/google/leveldb/operators/__init__.py rename to providers/src/airflow/providers/google/leveldb/operators/__init__.py diff --git a/airflow/providers/google/leveldb/operators/leveldb.py b/providers/src/airflow/providers/google/leveldb/operators/leveldb.py similarity index 100% rename 
from airflow/providers/google/leveldb/operators/leveldb.py rename to providers/src/airflow/providers/google/leveldb/operators/leveldb.py diff --git a/airflow/providers/google/marketing_platform/__init__.py b/providers/src/airflow/providers/google/marketing_platform/__init__.py similarity index 100% rename from airflow/providers/google/marketing_platform/__init__.py rename to providers/src/airflow/providers/google/marketing_platform/__init__.py diff --git a/airflow/providers/google/marketing_platform/example_dags/__init__.py b/providers/src/airflow/providers/google/marketing_platform/example_dags/__init__.py similarity index 100% rename from airflow/providers/google/marketing_platform/example_dags/__init__.py rename to providers/src/airflow/providers/google/marketing_platform/example_dags/__init__.py diff --git a/airflow/providers/google/marketing_platform/example_dags/example_display_video.py b/providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py similarity index 100% rename from airflow/providers/google/marketing_platform/example_dags/example_display_video.py rename to providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py diff --git a/airflow/providers/google/marketing_platform/hooks/__init__.py b/providers/src/airflow/providers/google/marketing_platform/hooks/__init__.py similarity index 100% rename from airflow/providers/google/marketing_platform/hooks/__init__.py rename to providers/src/airflow/providers/google/marketing_platform/hooks/__init__.py diff --git a/airflow/providers/google/marketing_platform/hooks/analytics.py b/providers/src/airflow/providers/google/marketing_platform/hooks/analytics.py similarity index 100% rename from airflow/providers/google/marketing_platform/hooks/analytics.py rename to providers/src/airflow/providers/google/marketing_platform/hooks/analytics.py diff --git a/airflow/providers/google/marketing_platform/hooks/analytics_admin.py b/providers/src/airflow/providers/google/marketing_platform/hooks/analytics_admin.py similarity index 100% rename from airflow/providers/google/marketing_platform/hooks/analytics_admin.py rename to providers/src/airflow/providers/google/marketing_platform/hooks/analytics_admin.py diff --git a/airflow/providers/google/marketing_platform/hooks/campaign_manager.py b/providers/src/airflow/providers/google/marketing_platform/hooks/campaign_manager.py similarity index 100% rename from airflow/providers/google/marketing_platform/hooks/campaign_manager.py rename to providers/src/airflow/providers/google/marketing_platform/hooks/campaign_manager.py diff --git a/airflow/providers/google/marketing_platform/hooks/display_video.py b/providers/src/airflow/providers/google/marketing_platform/hooks/display_video.py similarity index 100% rename from airflow/providers/google/marketing_platform/hooks/display_video.py rename to providers/src/airflow/providers/google/marketing_platform/hooks/display_video.py diff --git a/airflow/providers/google/marketing_platform/hooks/search_ads.py b/providers/src/airflow/providers/google/marketing_platform/hooks/search_ads.py similarity index 100% rename from airflow/providers/google/marketing_platform/hooks/search_ads.py rename to providers/src/airflow/providers/google/marketing_platform/hooks/search_ads.py diff --git a/airflow/providers/google/marketing_platform/links/__init__.py b/providers/src/airflow/providers/google/marketing_platform/links/__init__.py similarity index 100% rename from 
airflow/providers/google/marketing_platform/links/__init__.py
rename to providers/src/airflow/providers/google/marketing_platform/links/__init__.py
diff --git a/airflow/providers/google/marketing_platform/links/analytics_admin.py b/providers/src/airflow/providers/google/marketing_platform/links/analytics_admin.py
similarity index 100%
rename from airflow/providers/google/marketing_platform/links/analytics_admin.py
rename to providers/src/airflow/providers/google/marketing_platform/links/analytics_admin.py
diff --git a/airflow/providers/google/marketing_platform/operators/__init__.py b/providers/src/airflow/providers/google/marketing_platform/operators/__init__.py
similarity index 100%
rename from airflow/providers/google/marketing_platform/operators/__init__.py
rename to providers/src/airflow/providers/google/marketing_platform/operators/__init__.py
diff --git a/airflow/providers/google/marketing_platform/operators/analytics.py b/providers/src/airflow/providers/google/marketing_platform/operators/analytics.py
similarity index 100%
rename from airflow/providers/google/marketing_platform/operators/analytics.py
rename to providers/src/airflow/providers/google/marketing_platform/operators/analytics.py
diff --git a/airflow/providers/google/marketing_platform/operators/analytics_admin.py b/providers/src/airflow/providers/google/marketing_platform/operators/analytics_admin.py
similarity index 100%
rename from airflow/providers/google/marketing_platform/operators/analytics_admin.py
rename to providers/src/airflow/providers/google/marketing_platform/operators/analytics_admin.py
diff --git a/airflow/providers/google/marketing_platform/operators/campaign_manager.py b/providers/src/airflow/providers/google/marketing_platform/operators/campaign_manager.py
similarity index 100%
rename from airflow/providers/google/marketing_platform/operators/campaign_manager.py
rename to providers/src/airflow/providers/google/marketing_platform/operators/campaign_manager.py
diff --git a/airflow/providers/google/marketing_platform/operators/display_video.py b/providers/src/airflow/providers/google/marketing_platform/operators/display_video.py
similarity index 100%
rename from airflow/providers/google/marketing_platform/operators/display_video.py
rename to providers/src/airflow/providers/google/marketing_platform/operators/display_video.py
diff --git a/airflow/providers/google/marketing_platform/operators/search_ads.py b/providers/src/airflow/providers/google/marketing_platform/operators/search_ads.py
similarity index 100%
rename from airflow/providers/google/marketing_platform/operators/search_ads.py
rename to providers/src/airflow/providers/google/marketing_platform/operators/search_ads.py
diff --git a/airflow/providers/google/marketing_platform/sensors/__init__.py b/providers/src/airflow/providers/google/marketing_platform/sensors/__init__.py
similarity index 100%
rename from airflow/providers/google/marketing_platform/sensors/__init__.py
rename to providers/src/airflow/providers/google/marketing_platform/sensors/__init__.py
diff --git a/airflow/providers/google/marketing_platform/sensors/campaign_manager.py b/providers/src/airflow/providers/google/marketing_platform/sensors/campaign_manager.py
similarity index 100%
rename from airflow/providers/google/marketing_platform/sensors/campaign_manager.py
rename to providers/src/airflow/providers/google/marketing_platform/sensors/campaign_manager.py
diff --git a/airflow/providers/google/marketing_platform/sensors/display_video.py b/providers/src/airflow/providers/google/marketing_platform/sensors/display_video.py
similarity index 100%
rename from airflow/providers/google/marketing_platform/sensors/display_video.py
rename to providers/src/airflow/providers/google/marketing_platform/sensors/display_video.py
diff --git a/airflow/providers/google/provider.yaml b/providers/src/airflow/providers/google/provider.yaml
similarity index 100%
rename from airflow/providers/google/provider.yaml
rename to providers/src/airflow/providers/google/provider.yaml
diff --git a/airflow/providers/google/cloud/utils/__init__.py b/providers/src/airflow/providers/google/suite/__init__.py
similarity index 100%
rename from airflow/providers/google/cloud/utils/__init__.py
rename to providers/src/airflow/providers/google/suite/__init__.py
diff --git a/airflow/providers/google/suite/__init__.py b/providers/src/airflow/providers/google/suite/hooks/__init__.py
similarity index 100%
rename from airflow/providers/google/suite/__init__.py
rename to providers/src/airflow/providers/google/suite/hooks/__init__.py
diff --git a/airflow/providers/google/suite/hooks/calendar.py b/providers/src/airflow/providers/google/suite/hooks/calendar.py
similarity index 100%
rename from airflow/providers/google/suite/hooks/calendar.py
rename to providers/src/airflow/providers/google/suite/hooks/calendar.py
diff --git a/airflow/providers/google/suite/hooks/drive.py b/providers/src/airflow/providers/google/suite/hooks/drive.py
similarity index 100%
rename from airflow/providers/google/suite/hooks/drive.py
rename to providers/src/airflow/providers/google/suite/hooks/drive.py
diff --git a/airflow/providers/google/suite/hooks/sheets.py b/providers/src/airflow/providers/google/suite/hooks/sheets.py
similarity index 100%
rename from airflow/providers/google/suite/hooks/sheets.py
rename to providers/src/airflow/providers/google/suite/hooks/sheets.py
diff --git a/airflow/providers/google/suite/hooks/__init__.py b/providers/src/airflow/providers/google/suite/operators/__init__.py
similarity index 100%
rename from airflow/providers/google/suite/hooks/__init__.py
rename to providers/src/airflow/providers/google/suite/operators/__init__.py
diff --git a/airflow/providers/google/suite/operators/sheets.py b/providers/src/airflow/providers/google/suite/operators/sheets.py
similarity index 100%
rename from airflow/providers/google/suite/operators/sheets.py
rename to providers/src/airflow/providers/google/suite/operators/sheets.py
diff --git a/airflow/providers/google/suite/sensors/__init__.py b/providers/src/airflow/providers/google/suite/sensors/__init__.py
similarity index 100%
rename from airflow/providers/google/suite/sensors/__init__.py
rename to providers/src/airflow/providers/google/suite/sensors/__init__.py
diff --git a/airflow/providers/google/suite/sensors/drive.py b/providers/src/airflow/providers/google/suite/sensors/drive.py
similarity index 100%
rename from airflow/providers/google/suite/sensors/drive.py
rename to providers/src/airflow/providers/google/suite/sensors/drive.py
diff --git a/airflow/providers/google/suite/transfers/__init__.py b/providers/src/airflow/providers/google/suite/transfers/__init__.py
similarity index 100%
rename from airflow/providers/google/suite/transfers/__init__.py
rename to providers/src/airflow/providers/google/suite/transfers/__init__.py
diff --git a/airflow/providers/google/suite/transfers/gcs_to_gdrive.py b/providers/src/airflow/providers/google/suite/transfers/gcs_to_gdrive.py
similarity index 100%
rename from airflow/providers/google/suite/transfers/gcs_to_gdrive.py
rename to providers/src/airflow/providers/google/suite/transfers/gcs_to_gdrive.py
diff --git a/airflow/providers/google/suite/transfers/gcs_to_sheets.py b/providers/src/airflow/providers/google/suite/transfers/gcs_to_sheets.py
similarity index 100%
rename from airflow/providers/google/suite/transfers/gcs_to_sheets.py
rename to providers/src/airflow/providers/google/suite/transfers/gcs_to_sheets.py
diff --git a/airflow/providers/google/suite/transfers/local_to_drive.py b/providers/src/airflow/providers/google/suite/transfers/local_to_drive.py
similarity index 100%
rename from airflow/providers/google/suite/transfers/local_to_drive.py
rename to providers/src/airflow/providers/google/suite/transfers/local_to_drive.py
diff --git a/airflow/providers/google/suite/transfers/sql_to_sheets.py b/providers/src/airflow/providers/google/suite/transfers/sql_to_sheets.py
similarity index 100%
rename from airflow/providers/google/suite/transfers/sql_to_sheets.py
rename to providers/src/airflow/providers/google/suite/transfers/sql_to_sheets.py
diff --git a/airflow/providers/grpc/.latest-doc-only-change.txt b/providers/src/airflow/providers/grpc/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/grpc/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/grpc/.latest-doc-only-change.txt
diff --git a/airflow/providers/grpc/CHANGELOG.rst b/providers/src/airflow/providers/grpc/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/grpc/CHANGELOG.rst
rename to providers/src/airflow/providers/grpc/CHANGELOG.rst
diff --git a/airflow/providers/grpc/__init__.py b/providers/src/airflow/providers/grpc/__init__.py
similarity index 100%
rename from airflow/providers/grpc/__init__.py
rename to providers/src/airflow/providers/grpc/__init__.py
diff --git a/airflow/providers/google/suite/operators/__init__.py b/providers/src/airflow/providers/grpc/hooks/__init__.py
similarity index 100%
rename from airflow/providers/google/suite/operators/__init__.py
rename to providers/src/airflow/providers/grpc/hooks/__init__.py
diff --git a/airflow/providers/grpc/hooks/grpc.py b/providers/src/airflow/providers/grpc/hooks/grpc.py
similarity index 100%
rename from airflow/providers/grpc/hooks/grpc.py
rename to providers/src/airflow/providers/grpc/hooks/grpc.py
diff --git a/airflow/providers/grpc/hooks/__init__.py b/providers/src/airflow/providers/grpc/operators/__init__.py
similarity index 100%
rename from airflow/providers/grpc/hooks/__init__.py
rename to providers/src/airflow/providers/grpc/operators/__init__.py
diff --git a/airflow/providers/grpc/operators/grpc.py b/providers/src/airflow/providers/grpc/operators/grpc.py
similarity index 100%
rename from airflow/providers/grpc/operators/grpc.py
rename to providers/src/airflow/providers/grpc/operators/grpc.py
diff --git a/airflow/providers/grpc/provider.yaml b/providers/src/airflow/providers/grpc/provider.yaml
similarity index 100%
rename from airflow/providers/grpc/provider.yaml
rename to providers/src/airflow/providers/grpc/provider.yaml
diff --git a/airflow/providers/hashicorp/.latest-doc-only-change.txt b/providers/src/airflow/providers/hashicorp/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/hashicorp/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/hashicorp/.latest-doc-only-change.txt
diff --git a/airflow/providers/hashicorp/CHANGELOG.rst b/providers/src/airflow/providers/hashicorp/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/hashicorp/CHANGELOG.rst
rename to providers/src/airflow/providers/hashicorp/CHANGELOG.rst
diff --git a/airflow/providers/hashicorp/__init__.py b/providers/src/airflow/providers/hashicorp/__init__.py
similarity index 100%
rename from airflow/providers/hashicorp/__init__.py
rename to providers/src/airflow/providers/hashicorp/__init__.py
diff --git a/airflow/providers/hashicorp/_internal_client/__init__.py b/providers/src/airflow/providers/hashicorp/_internal_client/__init__.py
similarity index 100%
rename from airflow/providers/hashicorp/_internal_client/__init__.py
rename to providers/src/airflow/providers/hashicorp/_internal_client/__init__.py
diff --git a/airflow/providers/hashicorp/_internal_client/vault_client.py b/providers/src/airflow/providers/hashicorp/_internal_client/vault_client.py
similarity index 100%
rename from airflow/providers/hashicorp/_internal_client/vault_client.py
rename to providers/src/airflow/providers/hashicorp/_internal_client/vault_client.py
diff --git a/airflow/providers/hashicorp/hooks/__init__.py b/providers/src/airflow/providers/hashicorp/hooks/__init__.py
similarity index 100%
rename from airflow/providers/hashicorp/hooks/__init__.py
rename to providers/src/airflow/providers/hashicorp/hooks/__init__.py
diff --git a/airflow/providers/hashicorp/hooks/vault.py b/providers/src/airflow/providers/hashicorp/hooks/vault.py
similarity index 100%
rename from airflow/providers/hashicorp/hooks/vault.py
rename to providers/src/airflow/providers/hashicorp/hooks/vault.py
diff --git a/airflow/providers/hashicorp/provider.yaml b/providers/src/airflow/providers/hashicorp/provider.yaml
similarity index 100%
rename from airflow/providers/hashicorp/provider.yaml
rename to providers/src/airflow/providers/hashicorp/provider.yaml
diff --git a/airflow/providers/hashicorp/secrets/__init__.py b/providers/src/airflow/providers/hashicorp/secrets/__init__.py
similarity index 100%
rename from airflow/providers/hashicorp/secrets/__init__.py
rename to providers/src/airflow/providers/hashicorp/secrets/__init__.py
diff --git a/airflow/providers/hashicorp/secrets/vault.py b/providers/src/airflow/providers/hashicorp/secrets/vault.py
similarity index 100%
rename from airflow/providers/hashicorp/secrets/vault.py
rename to providers/src/airflow/providers/hashicorp/secrets/vault.py
diff --git a/airflow/providers/http/.latest-doc-only-change.txt b/providers/src/airflow/providers/http/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/http/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/http/.latest-doc-only-change.txt
diff --git a/airflow/providers/http/CHANGELOG.rst b/providers/src/airflow/providers/http/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/http/CHANGELOG.rst
rename to providers/src/airflow/providers/http/CHANGELOG.rst
diff --git a/airflow/providers/http/__init__.py b/providers/src/airflow/providers/http/__init__.py
similarity index 100%
rename from airflow/providers/http/__init__.py
rename to providers/src/airflow/providers/http/__init__.py
diff --git a/airflow/providers/grpc/operators/__init__.py b/providers/src/airflow/providers/http/hooks/__init__.py
similarity index 100%
rename from airflow/providers/grpc/operators/__init__.py
rename to providers/src/airflow/providers/http/hooks/__init__.py
diff --git a/airflow/providers/http/hooks/http.py b/providers/src/airflow/providers/http/hooks/http.py
similarity index 100%
rename from airflow/providers/http/hooks/http.py
rename to providers/src/airflow/providers/http/hooks/http.py
diff --git a/airflow/providers/http/hooks/__init__.py b/providers/src/airflow/providers/http/operators/__init__.py
similarity index 100%
rename from airflow/providers/http/hooks/__init__.py
rename to providers/src/airflow/providers/http/operators/__init__.py
diff --git a/airflow/providers/http/operators/http.py b/providers/src/airflow/providers/http/operators/http.py
similarity index 100%
rename from airflow/providers/http/operators/http.py
rename to providers/src/airflow/providers/http/operators/http.py
diff --git a/airflow/providers/http/provider.yaml b/providers/src/airflow/providers/http/provider.yaml
similarity index 100%
rename from airflow/providers/http/provider.yaml
rename to providers/src/airflow/providers/http/provider.yaml
diff --git a/airflow/providers/http/operators/__init__.py b/providers/src/airflow/providers/http/sensors/__init__.py
similarity index 100%
rename from airflow/providers/http/operators/__init__.py
rename to providers/src/airflow/providers/http/sensors/__init__.py
diff --git a/airflow/providers/http/sensors/http.py b/providers/src/airflow/providers/http/sensors/http.py
similarity index 100%
rename from airflow/providers/http/sensors/http.py
rename to providers/src/airflow/providers/http/sensors/http.py
diff --git a/airflow/providers/http/sensors/__init__.py b/providers/src/airflow/providers/http/triggers/__init__.py
similarity index 100%
rename from airflow/providers/http/sensors/__init__.py
rename to providers/src/airflow/providers/http/triggers/__init__.py
diff --git a/airflow/providers/http/triggers/http.py b/providers/src/airflow/providers/http/triggers/http.py
similarity index 100%
rename from airflow/providers/http/triggers/http.py
rename to providers/src/airflow/providers/http/triggers/http.py
diff --git a/airflow/providers/imap/.latest-doc-only-change.txt b/providers/src/airflow/providers/imap/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/imap/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/imap/.latest-doc-only-change.txt
diff --git a/airflow/providers/imap/CHANGELOG.rst b/providers/src/airflow/providers/imap/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/imap/CHANGELOG.rst
rename to providers/src/airflow/providers/imap/CHANGELOG.rst
diff --git a/airflow/providers/imap/__init__.py b/providers/src/airflow/providers/imap/__init__.py
similarity index 100%
rename from airflow/providers/imap/__init__.py
rename to providers/src/airflow/providers/imap/__init__.py
diff --git a/airflow/providers/http/triggers/__init__.py b/providers/src/airflow/providers/imap/hooks/__init__.py
similarity index 100%
rename from airflow/providers/http/triggers/__init__.py
rename to providers/src/airflow/providers/imap/hooks/__init__.py
diff --git a/airflow/providers/imap/hooks/imap.py b/providers/src/airflow/providers/imap/hooks/imap.py
similarity index 100%
rename from airflow/providers/imap/hooks/imap.py
rename to providers/src/airflow/providers/imap/hooks/imap.py
diff --git a/airflow/providers/imap/provider.yaml b/providers/src/airflow/providers/imap/provider.yaml
similarity index 100%
rename from airflow/providers/imap/provider.yaml
rename to providers/src/airflow/providers/imap/provider.yaml
diff --git a/airflow/providers/imap/hooks/__init__.py b/providers/src/airflow/providers/imap/sensors/__init__.py
similarity index 100%
rename from airflow/providers/imap/hooks/__init__.py
rename to providers/src/airflow/providers/imap/sensors/__init__.py
diff --git a/airflow/providers/imap/sensors/imap_attachment.py b/providers/src/airflow/providers/imap/sensors/imap_attachment.py
similarity index 100%
rename from airflow/providers/imap/sensors/imap_attachment.py
rename to providers/src/airflow/providers/imap/sensors/imap_attachment.py
diff --git a/airflow/providers/influxdb/.latest-doc-only-change.txt b/providers/src/airflow/providers/influxdb/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/influxdb/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/influxdb/.latest-doc-only-change.txt
diff --git a/airflow/providers/influxdb/CHANGELOG.rst b/providers/src/airflow/providers/influxdb/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/influxdb/CHANGELOG.rst
rename to providers/src/airflow/providers/influxdb/CHANGELOG.rst
diff --git a/airflow/providers/influxdb/__init__.py b/providers/src/airflow/providers/influxdb/__init__.py
similarity index 100%
rename from airflow/providers/influxdb/__init__.py
rename to providers/src/airflow/providers/influxdb/__init__.py
diff --git a/airflow/providers/influxdb/hooks/__init__.py b/providers/src/airflow/providers/influxdb/hooks/__init__.py
similarity index 100%
rename from airflow/providers/influxdb/hooks/__init__.py
rename to providers/src/airflow/providers/influxdb/hooks/__init__.py
diff --git a/airflow/providers/influxdb/hooks/influxdb.py b/providers/src/airflow/providers/influxdb/hooks/influxdb.py
similarity index 100%
rename from airflow/providers/influxdb/hooks/influxdb.py
rename to providers/src/airflow/providers/influxdb/hooks/influxdb.py
diff --git a/airflow/providers/influxdb/operators/__init__.py b/providers/src/airflow/providers/influxdb/operators/__init__.py
similarity index 100%
rename from airflow/providers/influxdb/operators/__init__.py
rename to providers/src/airflow/providers/influxdb/operators/__init__.py
diff --git a/airflow/providers/influxdb/operators/influxdb.py b/providers/src/airflow/providers/influxdb/operators/influxdb.py
similarity index 100%
rename from airflow/providers/influxdb/operators/influxdb.py
rename to providers/src/airflow/providers/influxdb/operators/influxdb.py
diff --git a/airflow/providers/influxdb/provider.yaml b/providers/src/airflow/providers/influxdb/provider.yaml
similarity index 100%
rename from airflow/providers/influxdb/provider.yaml
rename to providers/src/airflow/providers/influxdb/provider.yaml
diff --git a/airflow/providers/jdbc/.latest-doc-only-change.txt b/providers/src/airflow/providers/jdbc/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/jdbc/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/jdbc/.latest-doc-only-change.txt
diff --git a/airflow/providers/jdbc/CHANGELOG.rst b/providers/src/airflow/providers/jdbc/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/jdbc/CHANGELOG.rst
rename to providers/src/airflow/providers/jdbc/CHANGELOG.rst
diff --git a/airflow/providers/jdbc/__init__.py b/providers/src/airflow/providers/jdbc/__init__.py
similarity index 100%
rename from airflow/providers/jdbc/__init__.py
rename to providers/src/airflow/providers/jdbc/__init__.py
diff --git a/airflow/providers/imap/sensors/__init__.py b/providers/src/airflow/providers/jdbc/hooks/__init__.py
similarity index 100%
rename from airflow/providers/imap/sensors/__init__.py
rename to providers/src/airflow/providers/jdbc/hooks/__init__.py
diff --git a/airflow/providers/jdbc/hooks/jdbc.py b/providers/src/airflow/providers/jdbc/hooks/jdbc.py
similarity index 100%
rename from airflow/providers/jdbc/hooks/jdbc.py
rename to providers/src/airflow/providers/jdbc/hooks/jdbc.py
diff --git a/airflow/providers/jdbc/hooks/__init__.py b/providers/src/airflow/providers/jdbc/operators/__init__.py
similarity index 100%
rename from airflow/providers/jdbc/hooks/__init__.py
rename to providers/src/airflow/providers/jdbc/operators/__init__.py
diff --git a/airflow/providers/jdbc/operators/jdbc.py b/providers/src/airflow/providers/jdbc/operators/jdbc.py
similarity index 100%
rename from airflow/providers/jdbc/operators/jdbc.py
rename to providers/src/airflow/providers/jdbc/operators/jdbc.py
diff --git a/airflow/providers/jdbc/provider.yaml b/providers/src/airflow/providers/jdbc/provider.yaml
similarity index 100%
rename from airflow/providers/jdbc/provider.yaml
rename to providers/src/airflow/providers/jdbc/provider.yaml
diff --git a/airflow/providers/jenkins/.latest-doc-only-change.txt b/providers/src/airflow/providers/jenkins/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/jenkins/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/jenkins/.latest-doc-only-change.txt
diff --git a/airflow/providers/jenkins/CHANGELOG.rst b/providers/src/airflow/providers/jenkins/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/jenkins/CHANGELOG.rst
rename to providers/src/airflow/providers/jenkins/CHANGELOG.rst
diff --git a/airflow/providers/jenkins/__init__.py b/providers/src/airflow/providers/jenkins/__init__.py
similarity index 100%
rename from airflow/providers/jenkins/__init__.py
rename to providers/src/airflow/providers/jenkins/__init__.py
diff --git a/airflow/providers/jdbc/operators/__init__.py b/providers/src/airflow/providers/jenkins/hooks/__init__.py
similarity index 100%
rename from airflow/providers/jdbc/operators/__init__.py
rename to providers/src/airflow/providers/jenkins/hooks/__init__.py
diff --git a/airflow/providers/jenkins/hooks/jenkins.py b/providers/src/airflow/providers/jenkins/hooks/jenkins.py
similarity index 100%
rename from airflow/providers/jenkins/hooks/jenkins.py
rename to providers/src/airflow/providers/jenkins/hooks/jenkins.py
diff --git a/airflow/providers/jenkins/hooks/__init__.py b/providers/src/airflow/providers/jenkins/operators/__init__.py
similarity index 100%
rename from airflow/providers/jenkins/hooks/__init__.py
rename to providers/src/airflow/providers/jenkins/operators/__init__.py
diff --git a/airflow/providers/jenkins/operators/jenkins_job_trigger.py b/providers/src/airflow/providers/jenkins/operators/jenkins_job_trigger.py
similarity index 100%
rename from airflow/providers/jenkins/operators/jenkins_job_trigger.py
rename to providers/src/airflow/providers/jenkins/operators/jenkins_job_trigger.py
diff --git a/airflow/providers/jenkins/provider.yaml b/providers/src/airflow/providers/jenkins/provider.yaml
similarity index 100%
rename from airflow/providers/jenkins/provider.yaml
rename to providers/src/airflow/providers/jenkins/provider.yaml
diff --git a/airflow/providers/jenkins/operators/__init__.py b/providers/src/airflow/providers/jenkins/sensors/__init__.py
similarity index 100%
rename from airflow/providers/jenkins/operators/__init__.py
rename to providers/src/airflow/providers/jenkins/sensors/__init__.py
diff --git a/airflow/providers/jenkins/sensors/jenkins.py b/providers/src/airflow/providers/jenkins/sensors/jenkins.py
similarity index 100%
rename from airflow/providers/jenkins/sensors/jenkins.py
rename to providers/src/airflow/providers/jenkins/sensors/jenkins.py
diff --git a/airflow/providers/microsoft/azure/.latest-doc-only-change.txt b/providers/src/airflow/providers/microsoft/azure/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/microsoft/azure/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/microsoft/azure/.latest-doc-only-change.txt
diff --git a/airflow/providers/microsoft/azure/CHANGELOG.rst b/providers/src/airflow/providers/microsoft/azure/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/microsoft/azure/CHANGELOG.rst
rename to providers/src/airflow/providers/microsoft/azure/CHANGELOG.rst
diff --git a/airflow/providers/microsoft/azure/__init__.py b/providers/src/airflow/providers/microsoft/azure/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/azure/__init__.py
rename to providers/src/airflow/providers/microsoft/azure/__init__.py
diff --git a/airflow/providers/microsoft/azure/fs/__init__.py b/providers/src/airflow/providers/microsoft/azure/fs/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/azure/fs/__init__.py
rename to providers/src/airflow/providers/microsoft/azure/fs/__init__.py
diff --git a/airflow/providers/microsoft/azure/fs/adls.py b/providers/src/airflow/providers/microsoft/azure/fs/adls.py
similarity index 100%
rename from airflow/providers/microsoft/azure/fs/adls.py
rename to providers/src/airflow/providers/microsoft/azure/fs/adls.py
diff --git a/airflow/providers/jenkins/sensors/__init__.py b/providers/src/airflow/providers/microsoft/azure/hooks/__init__.py
similarity index 100%
rename from airflow/providers/jenkins/sensors/__init__.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/__init__.py
diff --git a/airflow/providers/microsoft/azure/hooks/adx.py b/providers/src/airflow/providers/microsoft/azure/hooks/adx.py
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/adx.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/adx.py
diff --git a/airflow/providers/microsoft/azure/hooks/asb.py b/providers/src/airflow/providers/microsoft/azure/hooks/asb.py
similarity index 96%
rename from airflow/providers/microsoft/azure/hooks/asb.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/asb.py
index c90833f52fea7..317447d111703 100644
--- a/airflow/providers/microsoft/azure/hooks/asb.py
+++ b/providers/src/airflow/providers/microsoft/azure/hooks/asb.py
@@ -251,9 +251,11 @@ def send_message(self, queue_name: str, messages: str | list[str], batch_message
             raise TypeError("Queue name cannot be None.")
         if not messages:
             raise ValueError("Messages list cannot be empty.")
-        with self.get_conn() as service_bus_client, service_bus_client.get_queue_sender(
-            queue_name=queue_name
-        ) as sender, sender:
+        with (
+            self.get_conn() as service_bus_client,
+            service_bus_client.get_queue_sender(queue_name=queue_name) as sender,
+            sender,
+        ):
             if isinstance(messages, str):
                 if not batch_message_flag:
                     msg = ServiceBusMessage(messages)
@@ -298,9 +300,11 @@ def receive_message(
         if queue_name is None:
             raise TypeError("Queue name cannot be None.")
 
-        with self.get_conn() as service_bus_client, service_bus_client.get_queue_receiver(
-            queue_name=queue_name
-        ) as receiver, receiver:
+        with (
+            self.get_conn() as service_bus_client,
+            service_bus_client.get_queue_receiver(queue_name=queue_name) as receiver,
+            receiver,
+        ):
             received_msgs = receiver.receive_messages(
                 max_message_count=max_message_count, max_wait_time=max_wait_time
             )
@@ -335,9 +339,13 @@ def receive_subscription_message(
             raise TypeError("Subscription name cannot be None.")
         if topic_name is None:
             raise TypeError("Topic name cannot be None.")
-        with self.get_conn() as service_bus_client, service_bus_client.get_subscription_receiver(
-            topic_name, subscription_name
-        ) as subscription_receiver, subscription_receiver:
+        with (
+            self.get_conn() as service_bus_client,
+            service_bus_client.get_subscription_receiver(
+                topic_name, subscription_name
+            ) as subscription_receiver,
+            subscription_receiver,
+        ):
             received_msgs = subscription_receiver.receive_messages(
                 max_message_count=max_message_count, max_wait_time=max_wait_time
             )
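The asb.py hunks above are a pure formatting change: chained context managers are regrouped into the parenthesized with-statement form that Python 3.10+ parses natively, with behavior unchanged (including the trailing bare sender/receiver item, which re-enters the already-created object exactly as the old code did). A minimal, runnable sketch of the pattern, using a stand-in context manager rather than the real Service Bus client API:

    from contextlib import contextmanager

    @contextmanager
    def managed(name: str):
        # Stand-in for a real connection object; traces enter/exit instead
        # of opening anything.
        print("enter", name)
        try:
            yield name
        finally:
            print("exit", name)

    # Python 3.10+ allows parentheses around multiple context managers,
    # so each one sits on its own line with no backslash continuations.
    with (
        managed("service_bus_client") as client,
        managed("sender via " + client) as sender,
    ):
        print("sending through", sender)
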
diff --git a/airflow/providers/microsoft/azure/hooks/base_azure.py b/providers/src/airflow/providers/microsoft/azure/hooks/base_azure.py
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/base_azure.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/base_azure.py
diff --git a/airflow/providers/microsoft/azure/hooks/batch.py b/providers/src/airflow/providers/microsoft/azure/hooks/batch.py
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/batch.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/batch.py
diff --git a/airflow/providers/microsoft/azure/hooks/container_instance.py b/providers/src/airflow/providers/microsoft/azure/hooks/container_instance.py
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/container_instance.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/container_instance.py
diff --git a/airflow/providers/microsoft/azure/hooks/container_registry.py b/providers/src/airflow/providers/microsoft/azure/hooks/container_registry.py
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/container_registry.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/container_registry.py
diff --git a/airflow/providers/microsoft/azure/hooks/container_volume.py b/providers/src/airflow/providers/microsoft/azure/hooks/container_volume.py
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/container_volume.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/container_volume.py
diff --git a/airflow/providers/microsoft/azure/hooks/cosmos.py b/providers/src/airflow/providers/microsoft/azure/hooks/cosmos.py
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/cosmos.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/cosmos.py
diff --git a/airflow/providers/microsoft/azure/hooks/data_factory.py b/providers/src/airflow/providers/microsoft/azure/hooks/data_factory.py
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/data_factory.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/data_factory.py
diff --git a/airflow/providers/microsoft/azure/hooks/data_lake.py b/providers/src/airflow/providers/microsoft/azure/hooks/data_lake.py
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/data_lake.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/data_lake.py
diff --git a/airflow/providers/microsoft/azure/hooks/fileshare.py b/providers/src/airflow/providers/microsoft/azure/hooks/fileshare.py
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/fileshare.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/fileshare.py
diff --git a/airflow/providers/microsoft/azure/hooks/msgraph.py b/providers/src/airflow/providers/microsoft/azure/hooks/msgraph.py
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/msgraph.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/msgraph.py
diff --git a/airflow/providers/microsoft/azure/hooks/powerbi.py b/providers/src/airflow/providers/microsoft/azure/hooks/powerbi.py
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/powerbi.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/powerbi.py
diff --git a/airflow/providers/microsoft/azure/hooks/synapse.py b/providers/src/airflow/providers/microsoft/azure/hooks/synapse.py
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/synapse.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/synapse.py
diff --git a/airflow/providers/microsoft/azure/hooks/wasb.py b/providers/src/airflow/providers/microsoft/azure/hooks/wasb.py
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/wasb.py
rename to providers/src/airflow/providers/microsoft/azure/hooks/wasb.py
diff --git a/airflow/providers/microsoft/azure/log/__init__.py b/providers/src/airflow/providers/microsoft/azure/log/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/azure/log/__init__.py
rename to providers/src/airflow/providers/microsoft/azure/log/__init__.py
diff --git a/airflow/providers/microsoft/azure/log/wasb_task_handler.py b/providers/src/airflow/providers/microsoft/azure/log/wasb_task_handler.py
similarity index 100%
rename from airflow/providers/microsoft/azure/log/wasb_task_handler.py
rename to providers/src/airflow/providers/microsoft/azure/log/wasb_task_handler.py
diff --git a/airflow/providers/microsoft/__init__.py b/providers/src/airflow/providers/microsoft/azure/operators/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/__init__.py
rename to providers/src/airflow/providers/microsoft/azure/operators/__init__.py
diff --git a/airflow/providers/microsoft/azure/operators/adls.py b/providers/src/airflow/providers/microsoft/azure/operators/adls.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/adls.py
rename to providers/src/airflow/providers/microsoft/azure/operators/adls.py
diff --git a/airflow/providers/microsoft/azure/operators/adx.py b/providers/src/airflow/providers/microsoft/azure/operators/adx.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/adx.py
rename to providers/src/airflow/providers/microsoft/azure/operators/adx.py
diff --git a/airflow/providers/microsoft/azure/operators/asb.py b/providers/src/airflow/providers/microsoft/azure/operators/asb.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/asb.py
rename to providers/src/airflow/providers/microsoft/azure/operators/asb.py
diff --git a/airflow/providers/microsoft/azure/operators/batch.py b/providers/src/airflow/providers/microsoft/azure/operators/batch.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/batch.py
rename to providers/src/airflow/providers/microsoft/azure/operators/batch.py
diff --git a/airflow/providers/microsoft/azure/operators/container_instances.py b/providers/src/airflow/providers/microsoft/azure/operators/container_instances.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/container_instances.py
rename to providers/src/airflow/providers/microsoft/azure/operators/container_instances.py
diff --git a/airflow/providers/microsoft/azure/operators/cosmos.py b/providers/src/airflow/providers/microsoft/azure/operators/cosmos.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/cosmos.py
rename to providers/src/airflow/providers/microsoft/azure/operators/cosmos.py
diff --git a/airflow/providers/microsoft/azure/operators/data_factory.py b/providers/src/airflow/providers/microsoft/azure/operators/data_factory.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/data_factory.py
rename to providers/src/airflow/providers/microsoft/azure/operators/data_factory.py
diff --git a/airflow/providers/microsoft/azure/operators/msgraph.py b/providers/src/airflow/providers/microsoft/azure/operators/msgraph.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/msgraph.py
rename to providers/src/airflow/providers/microsoft/azure/operators/msgraph.py
diff --git a/airflow/providers/microsoft/azure/operators/powerbi.py b/providers/src/airflow/providers/microsoft/azure/operators/powerbi.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/powerbi.py
rename to providers/src/airflow/providers/microsoft/azure/operators/powerbi.py
diff --git a/airflow/providers/microsoft/azure/operators/synapse.py b/providers/src/airflow/providers/microsoft/azure/operators/synapse.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/synapse.py
rename to providers/src/airflow/providers/microsoft/azure/operators/synapse.py
diff --git a/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py b/providers/src/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/wasb_delete_blob.py
rename to providers/src/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py
diff --git a/airflow/providers/microsoft/azure/provider.yaml b/providers/src/airflow/providers/microsoft/azure/provider.yaml
similarity index 100%
rename from airflow/providers/microsoft/azure/provider.yaml
rename to providers/src/airflow/providers/microsoft/azure/provider.yaml
diff --git a/airflow/providers/microsoft/azure/secrets/__init__.py b/providers/src/airflow/providers/microsoft/azure/secrets/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/azure/secrets/__init__.py
rename to providers/src/airflow/providers/microsoft/azure/secrets/__init__.py
diff --git a/airflow/providers/microsoft/azure/secrets/key_vault.py b/providers/src/airflow/providers/microsoft/azure/secrets/key_vault.py
similarity index 100%
rename from airflow/providers/microsoft/azure/secrets/key_vault.py
rename to providers/src/airflow/providers/microsoft/azure/secrets/key_vault.py
diff --git a/airflow/providers/microsoft/azure/hooks/__init__.py b/providers/src/airflow/providers/microsoft/azure/sensors/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/azure/hooks/__init__.py
rename to providers/src/airflow/providers/microsoft/azure/sensors/__init__.py
diff --git a/airflow/providers/microsoft/azure/sensors/cosmos.py b/providers/src/airflow/providers/microsoft/azure/sensors/cosmos.py
similarity index 100%
rename from airflow/providers/microsoft/azure/sensors/cosmos.py
rename to providers/src/airflow/providers/microsoft/azure/sensors/cosmos.py
diff --git a/airflow/providers/microsoft/azure/sensors/data_factory.py b/providers/src/airflow/providers/microsoft/azure/sensors/data_factory.py
similarity index 100%
rename from airflow/providers/microsoft/azure/sensors/data_factory.py
rename to providers/src/airflow/providers/microsoft/azure/sensors/data_factory.py
diff --git a/airflow/providers/microsoft/azure/sensors/msgraph.py b/providers/src/airflow/providers/microsoft/azure/sensors/msgraph.py
similarity index 100%
rename from airflow/providers/microsoft/azure/sensors/msgraph.py
rename to providers/src/airflow/providers/microsoft/azure/sensors/msgraph.py
diff --git a/airflow/providers/microsoft/azure/sensors/wasb.py b/providers/src/airflow/providers/microsoft/azure/sensors/wasb.py
similarity index 100%
rename from airflow/providers/microsoft/azure/sensors/wasb.py
rename to providers/src/airflow/providers/microsoft/azure/sensors/wasb.py
diff --git a/airflow/providers/microsoft/azure/transfers/__init__.py b/providers/src/airflow/providers/microsoft/azure/transfers/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/azure/transfers/__init__.py
rename to providers/src/airflow/providers/microsoft/azure/transfers/__init__.py
diff --git a/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.py b/providers/src/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.py
similarity index 100%
rename from airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.py
rename to providers/src/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.py
diff --git a/airflow/providers/microsoft/azure/transfers/local_to_adls.py b/providers/src/airflow/providers/microsoft/azure/transfers/local_to_adls.py
similarity index 100%
rename from airflow/providers/microsoft/azure/transfers/local_to_adls.py
rename to providers/src/airflow/providers/microsoft/azure/transfers/local_to_adls.py
diff --git a/airflow/providers/microsoft/azure/transfers/local_to_wasb.py b/providers/src/airflow/providers/microsoft/azure/transfers/local_to_wasb.py
similarity index 100%
rename from airflow/providers/microsoft/azure/transfers/local_to_wasb.py
rename to providers/src/airflow/providers/microsoft/azure/transfers/local_to_wasb.py
diff --git a/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py b/providers/src/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py
similarity index 100%
rename from airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py
rename to providers/src/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py
diff --git a/airflow/providers/microsoft/azure/transfers/s3_to_wasb.py b/providers/src/airflow/providers/microsoft/azure/transfers/s3_to_wasb.py
similarity index 100%
rename from airflow/providers/microsoft/azure/transfers/s3_to_wasb.py
rename to providers/src/airflow/providers/microsoft/azure/transfers/s3_to_wasb.py
diff --git a/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py b/providers/src/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py
similarity index 100%
rename from airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py
rename to providers/src/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py
diff --git a/airflow/providers/microsoft/azure/triggers/__init__.py b/providers/src/airflow/providers/microsoft/azure/triggers/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/azure/triggers/__init__.py
rename to providers/src/airflow/providers/microsoft/azure/triggers/__init__.py
diff --git a/airflow/providers/microsoft/azure/triggers/data_factory.py b/providers/src/airflow/providers/microsoft/azure/triggers/data_factory.py
similarity index 100%
rename from airflow/providers/microsoft/azure/triggers/data_factory.py
rename to providers/src/airflow/providers/microsoft/azure/triggers/data_factory.py
diff --git a/airflow/providers/microsoft/azure/triggers/msgraph.py b/providers/src/airflow/providers/microsoft/azure/triggers/msgraph.py
similarity index 100%
rename from airflow/providers/microsoft/azure/triggers/msgraph.py
rename to providers/src/airflow/providers/microsoft/azure/triggers/msgraph.py
diff --git a/airflow/providers/microsoft/azure/triggers/powerbi.py b/providers/src/airflow/providers/microsoft/azure/triggers/powerbi.py
similarity index 100%
rename from airflow/providers/microsoft/azure/triggers/powerbi.py
rename to providers/src/airflow/providers/microsoft/azure/triggers/powerbi.py
diff --git a/airflow/providers/microsoft/azure/triggers/wasb.py b/providers/src/airflow/providers/microsoft/azure/triggers/wasb.py
similarity index 100%
rename from airflow/providers/microsoft/azure/triggers/wasb.py
rename to providers/src/airflow/providers/microsoft/azure/triggers/wasb.py
diff --git a/airflow/providers/microsoft/azure/utils.py b/providers/src/airflow/providers/microsoft/azure/utils.py
similarity index 100%
rename from airflow/providers/microsoft/azure/utils.py
rename to providers/src/airflow/providers/microsoft/azure/utils.py
diff --git a/airflow/providers/microsoft/mssql/.latest-doc-only-change.txt b/providers/src/airflow/providers/microsoft/mssql/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/microsoft/mssql/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/microsoft/mssql/.latest-doc-only-change.txt
diff --git a/airflow/providers/microsoft/mssql/CHANGELOG.rst b/providers/src/airflow/providers/microsoft/mssql/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/microsoft/mssql/CHANGELOG.rst
rename to providers/src/airflow/providers/microsoft/mssql/CHANGELOG.rst
diff --git a/airflow/providers/microsoft/mssql/__init__.py b/providers/src/airflow/providers/microsoft/mssql/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/mssql/__init__.py
rename to providers/src/airflow/providers/microsoft/mssql/__init__.py
diff --git a/airflow/providers/microsoft/azure/operators/__init__.py b/providers/src/airflow/providers/microsoft/mssql/hooks/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/__init__.py
rename to providers/src/airflow/providers/microsoft/mssql/hooks/__init__.py
diff --git a/airflow/providers/microsoft/mssql/hooks/mssql.py b/providers/src/airflow/providers/microsoft/mssql/hooks/mssql.py
similarity index 100%
rename from airflow/providers/microsoft/mssql/hooks/mssql.py
rename to providers/src/airflow/providers/microsoft/mssql/hooks/mssql.py
diff --git a/airflow/providers/microsoft/azure/sensors/__init__.py b/providers/src/airflow/providers/microsoft/mssql/operators/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/azure/sensors/__init__.py
rename to providers/src/airflow/providers/microsoft/mssql/operators/__init__.py
diff --git a/airflow/providers/microsoft/mssql/operators/mssql.py b/providers/src/airflow/providers/microsoft/mssql/operators/mssql.py
similarity index 100%
rename from airflow/providers/microsoft/mssql/operators/mssql.py
rename to providers/src/airflow/providers/microsoft/mssql/operators/mssql.py
diff --git a/airflow/providers/microsoft/mssql/provider.yaml b/providers/src/airflow/providers/microsoft/mssql/provider.yaml
similarity index 100%
rename from airflow/providers/microsoft/mssql/provider.yaml
rename to providers/src/airflow/providers/microsoft/mssql/provider.yaml
diff --git a/airflow/providers/microsoft/psrp/.latest-doc-only-change.txt b/providers/src/airflow/providers/microsoft/psrp/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/microsoft/psrp/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/microsoft/psrp/.latest-doc-only-change.txt
diff --git a/airflow/providers/microsoft/psrp/CHANGELOG.rst b/providers/src/airflow/providers/microsoft/psrp/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/microsoft/psrp/CHANGELOG.rst
rename to providers/src/airflow/providers/microsoft/psrp/CHANGELOG.rst
diff --git a/airflow/providers/microsoft/psrp/__init__.py b/providers/src/airflow/providers/microsoft/psrp/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/psrp/__init__.py
rename to providers/src/airflow/providers/microsoft/psrp/__init__.py
diff --git a/airflow/providers/microsoft/mssql/hooks/__init__.py b/providers/src/airflow/providers/microsoft/psrp/hooks/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/mssql/hooks/__init__.py
rename to providers/src/airflow/providers/microsoft/psrp/hooks/__init__.py
diff --git a/airflow/providers/microsoft/psrp/hooks/psrp.py b/providers/src/airflow/providers/microsoft/psrp/hooks/psrp.py
similarity index 100%
rename from airflow/providers/microsoft/psrp/hooks/psrp.py
rename to providers/src/airflow/providers/microsoft/psrp/hooks/psrp.py
diff --git a/airflow/providers/microsoft/mssql/operators/__init__.py b/providers/src/airflow/providers/microsoft/psrp/operators/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/mssql/operators/__init__.py
rename to providers/src/airflow/providers/microsoft/psrp/operators/__init__.py
diff --git a/airflow/providers/microsoft/psrp/operators/psrp.py b/providers/src/airflow/providers/microsoft/psrp/operators/psrp.py
similarity index 93%
rename from airflow/providers/microsoft/psrp/operators/psrp.py
rename to providers/src/airflow/providers/microsoft/psrp/operators/psrp.py
index dd7da36a9e156..6927159a44ea2 100644
--- a/airflow/providers/microsoft/psrp/operators/psrp.py
+++ b/providers/src/airflow/providers/microsoft/psrp/operators/psrp.py
@@ -17,8 +17,9 @@
 # under the License.
 from __future__ import annotations
 
+from collections.abc import Sequence
 from logging import DEBUG
-from typing import TYPE_CHECKING, Any, Sequence
+from typing import TYPE_CHECKING, Any
 
 from jinja2.nativetypes import NativeEnvironment
 from pypsrp.serializer import TaggedValue
@@ -128,13 +129,16 @@ def __init__(
         self.psrp_session_init = psrp_session_init
 
     def execute(self, context: Context) -> list[Any] | None:
-        with PsrpHook(
-            self.conn_id,
-            logging_level=self.logging_level,
-            runspace_options=self.runspace_options,
-            wsman_options=self.wsman_options,
-            on_output_callback=self.log.info if not self.do_xcom_push else None,
-        ) as hook, hook.invoke() as ps:
+        with (
+            PsrpHook(
+                self.conn_id,
+                logging_level=self.logging_level,
+                runspace_options=self.runspace_options,
+                wsman_options=self.wsman_options,
+                on_output_callback=self.log.info if not self.do_xcom_push else None,
+            ) as hook,
+            hook.invoke() as ps,
+        ):
             if self.psrp_session_init is not None:
                 ps.add_command(self.psrp_session_init)
             if self.command:
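Besides the same with-statement regrouping, the psrp.py hunk above moves the Sequence import from typing to collections.abc: the container aliases in typing are deprecated since Python 3.9, and the ABC works identically in annotations. A short sketch under that assumption (the Job class and its commands field are illustrative, not the operator's real interface):

    from __future__ import annotations

    from collections.abc import Sequence  # preferred over typing.Sequence since 3.9
    from dataclasses import dataclass

    @dataclass
    class Job:
        # The annotation behaves the same as with the deprecated typing.Sequence alias.
        commands: Sequence[str]

    job = Job(commands=("Get-Process", "Get-Service"))
    print(list(job.commands))
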
diff --git a/airflow/providers/microsoft/psrp/provider.yaml b/providers/src/airflow/providers/microsoft/psrp/provider.yaml
similarity index 100%
rename from airflow/providers/microsoft/psrp/provider.yaml
rename to providers/src/airflow/providers/microsoft/psrp/provider.yaml
diff --git a/airflow/providers/microsoft/winrm/.latest-doc-only-change.txt b/providers/src/airflow/providers/microsoft/winrm/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/microsoft/winrm/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/microsoft/winrm/.latest-doc-only-change.txt
diff --git a/airflow/providers/microsoft/winrm/CHANGELOG.rst b/providers/src/airflow/providers/microsoft/winrm/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/microsoft/winrm/CHANGELOG.rst
rename to providers/src/airflow/providers/microsoft/winrm/CHANGELOG.rst
diff --git a/airflow/providers/microsoft/winrm/__init__.py b/providers/src/airflow/providers/microsoft/winrm/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/winrm/__init__.py
rename to providers/src/airflow/providers/microsoft/winrm/__init__.py
diff --git a/airflow/providers/microsoft/psrp/hooks/__init__.py b/providers/src/airflow/providers/microsoft/winrm/hooks/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/psrp/hooks/__init__.py
rename to providers/src/airflow/providers/microsoft/winrm/hooks/__init__.py
diff --git a/airflow/providers/microsoft/winrm/hooks/winrm.py b/providers/src/airflow/providers/microsoft/winrm/hooks/winrm.py
similarity index 100%
rename from airflow/providers/microsoft/winrm/hooks/winrm.py
rename to providers/src/airflow/providers/microsoft/winrm/hooks/winrm.py
diff --git a/airflow/providers/microsoft/psrp/operators/__init__.py b/providers/src/airflow/providers/microsoft/winrm/operators/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/psrp/operators/__init__.py
rename to providers/src/airflow/providers/microsoft/winrm/operators/__init__.py
diff --git a/airflow/providers/microsoft/winrm/operators/winrm.py b/providers/src/airflow/providers/microsoft/winrm/operators/winrm.py
similarity index 100%
rename from airflow/providers/microsoft/winrm/operators/winrm.py
rename to providers/src/airflow/providers/microsoft/winrm/operators/winrm.py
diff --git a/airflow/providers/microsoft/winrm/provider.yaml b/providers/src/airflow/providers/microsoft/winrm/provider.yaml
similarity index 100%
rename from airflow/providers/microsoft/winrm/provider.yaml
rename to providers/src/airflow/providers/microsoft/winrm/provider.yaml
diff --git a/airflow/providers/mongo/.latest-doc-only-change.txt b/providers/src/airflow/providers/mongo/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/mongo/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/mongo/.latest-doc-only-change.txt
diff --git a/airflow/providers/mongo/CHANGELOG.rst b/providers/src/airflow/providers/mongo/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/mongo/CHANGELOG.rst
rename to providers/src/airflow/providers/mongo/CHANGELOG.rst
diff --git a/airflow/providers/mongo/__init__.py b/providers/src/airflow/providers/mongo/__init__.py
similarity index 100%
rename from airflow/providers/mongo/__init__.py
rename to providers/src/airflow/providers/mongo/__init__.py
diff --git a/airflow/providers/microsoft/winrm/hooks/__init__.py b/providers/src/airflow/providers/mongo/hooks/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/winrm/hooks/__init__.py
rename to providers/src/airflow/providers/mongo/hooks/__init__.py
diff --git a/airflow/providers/mongo/hooks/mongo.py b/providers/src/airflow/providers/mongo/hooks/mongo.py
similarity index 100%
rename from airflow/providers/mongo/hooks/mongo.py
rename to providers/src/airflow/providers/mongo/hooks/mongo.py
diff --git a/airflow/providers/mongo/provider.yaml b/providers/src/airflow/providers/mongo/provider.yaml
similarity index 100%
rename from airflow/providers/mongo/provider.yaml
rename to providers/src/airflow/providers/mongo/provider.yaml
diff --git a/airflow/providers/microsoft/winrm/operators/__init__.py b/providers/src/airflow/providers/mongo/sensors/__init__.py
similarity index 100%
rename from airflow/providers/microsoft/winrm/operators/__init__.py
rename to providers/src/airflow/providers/mongo/sensors/__init__.py
diff --git a/airflow/providers/mongo/sensors/mongo.py b/providers/src/airflow/providers/mongo/sensors/mongo.py
similarity index 100%
rename from airflow/providers/mongo/sensors/mongo.py
rename to providers/src/airflow/providers/mongo/sensors/mongo.py
diff --git a/airflow/providers/mysql/.latest-doc-only-change.txt b/providers/src/airflow/providers/mysql/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/mysql/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/mysql/.latest-doc-only-change.txt
diff --git a/airflow/providers/mysql/CHANGELOG.rst b/providers/src/airflow/providers/mysql/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/mysql/CHANGELOG.rst
rename to providers/src/airflow/providers/mysql/CHANGELOG.rst
diff --git a/airflow/providers/mysql/__init__.py b/providers/src/airflow/providers/mysql/__init__.py
similarity index 100%
rename from airflow/providers/mysql/__init__.py
rename to providers/src/airflow/providers/mysql/__init__.py
diff --git a/airflow/providers/mysql/assets/__init__.py b/providers/src/airflow/providers/mysql/assets/__init__.py
similarity index 100%
rename from airflow/providers/mysql/assets/__init__.py
rename to providers/src/airflow/providers/mysql/assets/__init__.py
diff --git a/airflow/providers/mysql/assets/mysql.py b/providers/src/airflow/providers/mysql/assets/mysql.py
similarity index 100%
rename from airflow/providers/mysql/assets/mysql.py
rename to providers/src/airflow/providers/mysql/assets/mysql.py
diff --git a/airflow/providers/mongo/hooks/__init__.py b/providers/src/airflow/providers/mysql/hooks/__init__.py
similarity index 100%
rename from airflow/providers/mongo/hooks/__init__.py
rename to providers/src/airflow/providers/mysql/hooks/__init__.py
diff --git a/airflow/providers/mysql/hooks/mysql.py b/providers/src/airflow/providers/mysql/hooks/mysql.py
similarity index 100%
rename from airflow/providers/mysql/hooks/mysql.py
rename to providers/src/airflow/providers/mysql/hooks/mysql.py
diff --git a/airflow/providers/mongo/sensors/__init__.py b/providers/src/airflow/providers/mysql/operators/__init__.py
similarity index 100%
rename from airflow/providers/mongo/sensors/__init__.py
rename to providers/src/airflow/providers/mysql/operators/__init__.py
diff --git a/airflow/providers/mysql/operators/mysql.py b/providers/src/airflow/providers/mysql/operators/mysql.py
similarity index 100%
rename from airflow/providers/mysql/operators/mysql.py
rename to providers/src/airflow/providers/mysql/operators/mysql.py
diff --git a/airflow/providers/mysql/provider.yaml b/providers/src/airflow/providers/mysql/provider.yaml
similarity index 100%
rename from airflow/providers/mysql/provider.yaml
rename to providers/src/airflow/providers/mysql/provider.yaml
diff --git a/airflow/providers/mysql/transfers/__init__.py b/providers/src/airflow/providers/mysql/transfers/__init__.py
similarity index 100%
rename from airflow/providers/mysql/transfers/__init__.py
rename to providers/src/airflow/providers/mysql/transfers/__init__.py
diff --git a/airflow/providers/mysql/transfers/presto_to_mysql.py b/providers/src/airflow/providers/mysql/transfers/presto_to_mysql.py
similarity index 100%
rename from airflow/providers/mysql/transfers/presto_to_mysql.py
rename to providers/src/airflow/providers/mysql/transfers/presto_to_mysql.py
diff --git a/airflow/providers/mysql/transfers/s3_to_mysql.py b/providers/src/airflow/providers/mysql/transfers/s3_to_mysql.py
similarity index 100%
rename from airflow/providers/mysql/transfers/s3_to_mysql.py
rename to providers/src/airflow/providers/mysql/transfers/s3_to_mysql.py
diff --git a/airflow/providers/mysql/transfers/trino_to_mysql.py b/providers/src/airflow/providers/mysql/transfers/trino_to_mysql.py
similarity index 100%
rename from airflow/providers/mysql/transfers/trino_to_mysql.py
rename to providers/src/airflow/providers/mysql/transfers/trino_to_mysql.py
diff --git a/airflow/providers/mysql/transfers/vertica_to_mysql.py b/providers/src/airflow/providers/mysql/transfers/vertica_to_mysql.py
similarity index 100%
rename from airflow/providers/mysql/transfers/vertica_to_mysql.py
rename to providers/src/airflow/providers/mysql/transfers/vertica_to_mysql.py
diff --git a/airflow/providers/neo4j/.latest-doc-only-change.txt b/providers/src/airflow/providers/neo4j/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/neo4j/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/neo4j/.latest-doc-only-change.txt
diff --git a/airflow/providers/neo4j/CHANGELOG.rst b/providers/src/airflow/providers/neo4j/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/neo4j/CHANGELOG.rst
rename to providers/src/airflow/providers/neo4j/CHANGELOG.rst
diff --git a/airflow/providers/neo4j/README.md b/providers/src/airflow/providers/neo4j/README.md
similarity index 100%
rename from airflow/providers/neo4j/README.md
rename to providers/src/airflow/providers/neo4j/README.md
diff --git a/airflow/providers/neo4j/__init__.py b/providers/src/airflow/providers/neo4j/__init__.py
similarity index 100%
rename from airflow/providers/neo4j/__init__.py
rename to providers/src/airflow/providers/neo4j/__init__.py
diff --git a/airflow/providers/mysql/hooks/__init__.py b/providers/src/airflow/providers/neo4j/hooks/__init__.py
similarity index 100%
rename from airflow/providers/mysql/hooks/__init__.py
rename to providers/src/airflow/providers/neo4j/hooks/__init__.py
diff --git a/airflow/providers/neo4j/hooks/neo4j.py b/providers/src/airflow/providers/neo4j/hooks/neo4j.py
similarity index 100%
rename from airflow/providers/neo4j/hooks/neo4j.py
rename to providers/src/airflow/providers/neo4j/hooks/neo4j.py
diff --git a/airflow/providers/mysql/operators/__init__.py b/providers/src/airflow/providers/neo4j/operators/__init__.py
similarity index 100%
rename from airflow/providers/mysql/operators/__init__.py
rename to providers/src/airflow/providers/neo4j/operators/__init__.py
diff --git a/airflow/providers/neo4j/operators/neo4j.py b/providers/src/airflow/providers/neo4j/operators/neo4j.py
similarity index 100%
rename from airflow/providers/neo4j/operators/neo4j.py
rename to providers/src/airflow/providers/neo4j/operators/neo4j.py
diff --git a/airflow/providers/neo4j/provider.yaml b/providers/src/airflow/providers/neo4j/provider.yaml
similarity index 100%
rename from airflow/providers/neo4j/provider.yaml
rename to providers/src/airflow/providers/neo4j/provider.yaml
diff --git a/airflow/providers/odbc/.latest-doc-only-change.txt b/providers/src/airflow/providers/odbc/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/odbc/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/odbc/.latest-doc-only-change.txt
diff --git a/airflow/providers/odbc/CHANGELOG.rst b/providers/src/airflow/providers/odbc/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/odbc/CHANGELOG.rst
rename to providers/src/airflow/providers/odbc/CHANGELOG.rst
diff --git a/airflow/providers/odbc/__init__.py b/providers/src/airflow/providers/odbc/__init__.py
similarity index 100%
rename from airflow/providers/odbc/__init__.py
rename to providers/src/airflow/providers/odbc/__init__.py
diff --git a/airflow/providers/odbc/hooks/__init__.py b/providers/src/airflow/providers/odbc/hooks/__init__.py
similarity index 100%
rename from airflow/providers/odbc/hooks/__init__.py
rename to providers/src/airflow/providers/odbc/hooks/__init__.py
diff --git a/airflow/providers/odbc/hooks/odbc.py b/providers/src/airflow/providers/odbc/hooks/odbc.py
similarity index 100%
rename from airflow/providers/odbc/hooks/odbc.py
rename to providers/src/airflow/providers/odbc/hooks/odbc.py
diff --git a/airflow/providers/odbc/provider.yaml b/providers/src/airflow/providers/odbc/provider.yaml
similarity index 100%
rename from airflow/providers/odbc/provider.yaml
rename to providers/src/airflow/providers/odbc/provider.yaml
diff --git a/airflow/providers/openai/.latest-doc-only-change.txt b/providers/src/airflow/providers/openai/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/openai/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/openai/.latest-doc-only-change.txt
diff --git a/airflow/providers/openai/CHANGELOG.rst b/providers/src/airflow/providers/openai/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/openai/CHANGELOG.rst
rename to providers/src/airflow/providers/openai/CHANGELOG.rst
diff --git a/airflow/providers/openai/__init__.py b/providers/src/airflow/providers/openai/__init__.py
similarity index 100%
rename from airflow/providers/openai/__init__.py
rename to providers/src/airflow/providers/openai/__init__.py
diff --git a/airflow/providers/openai/exceptions.py b/providers/src/airflow/providers/openai/exceptions.py
similarity index 100%
rename from airflow/providers/openai/exceptions.py
rename to providers/src/airflow/providers/openai/exceptions.py
diff --git a/airflow/providers/openai/hooks/__init__.py b/providers/src/airflow/providers/openai/hooks/__init__.py
similarity index 100%
rename from airflow/providers/openai/hooks/__init__.py
rename to providers/src/airflow/providers/openai/hooks/__init__.py
diff --git a/airflow/providers/openai/hooks/openai.py b/providers/src/airflow/providers/openai/hooks/openai.py
similarity index 100%
rename from airflow/providers/openai/hooks/openai.py
rename to providers/src/airflow/providers/openai/hooks/openai.py
diff --git a/airflow/providers/openai/operators/__init__.py b/providers/src/airflow/providers/openai/operators/__init__.py
similarity index 100%
rename from airflow/providers/openai/operators/__init__.py
rename to providers/src/airflow/providers/openai/operators/__init__.py
diff --git a/airflow/providers/openai/operators/openai.py b/providers/src/airflow/providers/openai/operators/openai.py
similarity index 100%
rename from airflow/providers/openai/operators/openai.py
rename to providers/src/airflow/providers/openai/operators/openai.py
diff --git a/airflow/providers/openai/provider.yaml b/providers/src/airflow/providers/openai/provider.yaml
similarity index 100%
rename from airflow/providers/openai/provider.yaml
rename to providers/src/airflow/providers/openai/provider.yaml
diff --git a/airflow/providers/openai/triggers/__init__.py b/providers/src/airflow/providers/openai/triggers/__init__.py
similarity index 100%
rename from airflow/providers/openai/triggers/__init__.py
rename to providers/src/airflow/providers/openai/triggers/__init__.py
diff --git a/airflow/providers/openai/triggers/openai.py b/providers/src/airflow/providers/openai/triggers/openai.py
similarity index 100%
rename from airflow/providers/openai/triggers/openai.py
rename to providers/src/airflow/providers/openai/triggers/openai.py
diff --git a/airflow/providers/openfaas/.latest-doc-only-change.txt b/providers/src/airflow/providers/openfaas/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/openfaas/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/openfaas/.latest-doc-only-change.txt
diff --git a/airflow/providers/openfaas/CHANGELOG.rst b/providers/src/airflow/providers/openfaas/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/openfaas/CHANGELOG.rst
rename to providers/src/airflow/providers/openfaas/CHANGELOG.rst
diff --git a/airflow/providers/openfaas/__init__.py b/providers/src/airflow/providers/openfaas/__init__.py
similarity index 100%
rename from airflow/providers/openfaas/__init__.py
rename to providers/src/airflow/providers/openfaas/__init__.py
diff --git a/airflow/providers/neo4j/hooks/__init__.py b/providers/src/airflow/providers/openfaas/hooks/__init__.py
similarity index 100%
rename from airflow/providers/neo4j/hooks/__init__.py
rename to providers/src/airflow/providers/openfaas/hooks/__init__.py
diff --git a/airflow/providers/openfaas/hooks/openfaas.py b/providers/src/airflow/providers/openfaas/hooks/openfaas.py
similarity index 100%
rename from airflow/providers/openfaas/hooks/openfaas.py
rename to providers/src/airflow/providers/openfaas/hooks/openfaas.py
diff --git a/airflow/providers/openfaas/provider.yaml b/providers/src/airflow/providers/openfaas/provider.yaml
similarity index 100%
rename from airflow/providers/openfaas/provider.yaml
rename to providers/src/airflow/providers/openfaas/provider.yaml
diff --git a/airflow/providers/openlineage/.latest-doc-only-change.txt b/providers/src/airflow/providers/openlineage/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/openlineage/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/openlineage/.latest-doc-only-change.txt
diff --git a/airflow/providers/openlineage/CHANGELOG.rst b/providers/src/airflow/providers/openlineage/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/openlineage/CHANGELOG.rst
rename to providers/src/airflow/providers/openlineage/CHANGELOG.rst
diff --git a/airflow/providers/openlineage/__init__.py b/providers/src/airflow/providers/openlineage/__init__.py
similarity index 100%
rename from airflow/providers/openlineage/__init__.py
rename to providers/src/airflow/providers/openlineage/__init__.py
diff --git a/airflow/providers/openlineage/conf.py b/providers/src/airflow/providers/openlineage/conf.py
similarity index 100%
rename from airflow/providers/openlineage/conf.py
rename to providers/src/airflow/providers/openlineage/conf.py
diff --git a/airflow/providers/openlineage/extractors/__init__.py b/providers/src/airflow/providers/openlineage/extractors/__init__.py
similarity index 100%
rename from airflow/providers/openlineage/extractors/__init__.py
rename to providers/src/airflow/providers/openlineage/extractors/__init__.py
diff --git a/airflow/providers/openlineage/extractors/base.py b/providers/src/airflow/providers/openlineage/extractors/base.py
similarity index 100%
rename from airflow/providers/openlineage/extractors/base.py
rename to providers/src/airflow/providers/openlineage/extractors/base.py
diff --git a/airflow/providers/openlineage/extractors/bash.py b/providers/src/airflow/providers/openlineage/extractors/bash.py
similarity index 100%
rename from airflow/providers/openlineage/extractors/bash.py
rename to providers/src/airflow/providers/openlineage/extractors/bash.py
diff --git a/airflow/providers/openlineage/extractors/manager.py b/providers/src/airflow/providers/openlineage/extractors/manager.py
similarity index 100%
rename from airflow/providers/openlineage/extractors/manager.py
rename to providers/src/airflow/providers/openlineage/extractors/manager.py
diff --git a/airflow/providers/openlineage/extractors/python.py b/providers/src/airflow/providers/openlineage/extractors/python.py
similarity index 100%
rename from airflow/providers/openlineage/extractors/python.py
rename to providers/src/airflow/providers/openlineage/extractors/python.py
diff --git a/airflow/providers/openlineage/facets/AirflowDagRunFacet.json b/providers/src/airflow/providers/openlineage/facets/AirflowDagRunFacet.json
similarity index 100%
rename from airflow/providers/openlineage/facets/AirflowDagRunFacet.json
rename to providers/src/airflow/providers/openlineage/facets/AirflowDagRunFacet.json
diff --git a/airflow/providers/openlineage/facets/AirflowDebugRunFacet.json b/providers/src/airflow/providers/openlineage/facets/AirflowDebugRunFacet.json
similarity index 100%
rename from airflow/providers/openlineage/facets/AirflowDebugRunFacet.json
rename to providers/src/airflow/providers/openlineage/facets/AirflowDebugRunFacet.json
diff --git a/airflow/providers/openlineage/facets/AirflowJobFacet.json b/providers/src/airflow/providers/openlineage/facets/AirflowJobFacet.json
similarity index 100%
rename from airflow/providers/openlineage/facets/AirflowJobFacet.json
rename to providers/src/airflow/providers/openlineage/facets/AirflowJobFacet.json
diff --git a/airflow/providers/openlineage/facets/AirflowRunFacet.json b/providers/src/airflow/providers/openlineage/facets/AirflowRunFacet.json
similarity index 100%
rename from airflow/providers/openlineage/facets/AirflowRunFacet.json
rename to providers/src/airflow/providers/openlineage/facets/AirflowRunFacet.json
diff --git a/airflow/providers/openlineage/facets/AirflowStateRunFacet.json b/providers/src/airflow/providers/openlineage/facets/AirflowStateRunFacet.json
similarity index 100%
rename from airflow/providers/openlineage/facets/AirflowStateRunFacet.json
rename to providers/src/airflow/providers/openlineage/facets/AirflowStateRunFacet.json
diff --git a/airflow/providers/openlineage/facets/__init__.py b/providers/src/airflow/providers/openlineage/facets/__init__.py
similarity index 100%
rename from airflow/providers/openlineage/facets/__init__.py
rename to providers/src/airflow/providers/openlineage/facets/__init__.py
diff --git a/airflow/providers/openlineage/plugins/__init__.py b/providers/src/airflow/providers/openlineage/plugins/__init__.py
similarity index 100%
rename from airflow/providers/openlineage/plugins/__init__.py
rename to providers/src/airflow/providers/openlineage/plugins/__init__.py
diff --git a/airflow/providers/openlineage/plugins/adapter.py b/providers/src/airflow/providers/openlineage/plugins/adapter.py
similarity index 100%
rename from airflow/providers/openlineage/plugins/adapter.py
rename to providers/src/airflow/providers/openlineage/plugins/adapter.py
diff --git a/airflow/providers/openlineage/plugins/facets.py b/providers/src/airflow/providers/openlineage/plugins/facets.py
similarity index 100%
rename from airflow/providers/openlineage/plugins/facets.py
rename to providers/src/airflow/providers/openlineage/plugins/facets.py
diff --git a/airflow/providers/openlineage/plugins/listener.py b/providers/src/airflow/providers/openlineage/plugins/listener.py
similarity index 100%
rename from airflow/providers/openlineage/plugins/listener.py
rename to providers/src/airflow/providers/openlineage/plugins/listener.py
diff --git a/airflow/providers/openlineage/plugins/macros.py b/providers/src/airflow/providers/openlineage/plugins/macros.py
similarity index 100%
rename from airflow/providers/openlineage/plugins/macros.py
rename to providers/src/airflow/providers/openlineage/plugins/macros.py
diff --git a/airflow/providers/openlineage/plugins/openlineage.py b/providers/src/airflow/providers/openlineage/plugins/openlineage.py
similarity index 100%
rename from airflow/providers/openlineage/plugins/openlineage.py
rename to providers/src/airflow/providers/openlineage/plugins/openlineage.py
diff --git a/airflow/providers/openlineage/provider.yaml b/providers/src/airflow/providers/openlineage/provider.yaml
similarity index 100%
rename from airflow/providers/openlineage/provider.yaml
rename to providers/src/airflow/providers/openlineage/provider.yaml
diff --git a/airflow/providers/openlineage/sqlparser.py b/providers/src/airflow/providers/openlineage/sqlparser.py
similarity index 100%
rename from airflow/providers/openlineage/sqlparser.py
rename to providers/src/airflow/providers/openlineage/sqlparser.py
diff --git a/airflow/providers/openlineage/utils/__init__.py b/providers/src/airflow/providers/openlineage/utils/__init__.py
similarity index 100%
rename
to providers/src/airflow/providers/openlineage/utils/__init__.py diff --git a/airflow/providers/openlineage/utils/asset_compat_lineage_collector.py b/providers/src/airflow/providers/openlineage/utils/asset_compat_lineage_collector.py similarity index 100% rename from airflow/providers/openlineage/utils/asset_compat_lineage_collector.py rename to providers/src/airflow/providers/openlineage/utils/asset_compat_lineage_collector.py diff --git a/airflow/providers/openlineage/utils/selective_enable.py b/providers/src/airflow/providers/openlineage/utils/selective_enable.py similarity index 100% rename from airflow/providers/openlineage/utils/selective_enable.py rename to providers/src/airflow/providers/openlineage/utils/selective_enable.py diff --git a/airflow/providers/openlineage/utils/sql.py b/providers/src/airflow/providers/openlineage/utils/sql.py similarity index 100% rename from airflow/providers/openlineage/utils/sql.py rename to providers/src/airflow/providers/openlineage/utils/sql.py diff --git a/airflow/providers/openlineage/utils/utils.py b/providers/src/airflow/providers/openlineage/utils/utils.py similarity index 100% rename from airflow/providers/openlineage/utils/utils.py rename to providers/src/airflow/providers/openlineage/utils/utils.py diff --git a/airflow/providers/opensearch/.latest-doc-only-change.txt b/providers/src/airflow/providers/opensearch/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/opensearch/.latest-doc-only-change.txt rename to providers/src/airflow/providers/opensearch/.latest-doc-only-change.txt diff --git a/airflow/providers/opensearch/CHANGELOG.rst b/providers/src/airflow/providers/opensearch/CHANGELOG.rst similarity index 100% rename from airflow/providers/opensearch/CHANGELOG.rst rename to providers/src/airflow/providers/opensearch/CHANGELOG.rst diff --git a/airflow/providers/opensearch/__init__.py b/providers/src/airflow/providers/opensearch/__init__.py similarity index 100% rename from airflow/providers/opensearch/__init__.py rename to providers/src/airflow/providers/opensearch/__init__.py diff --git a/airflow/providers/opensearch/hooks/__init__.py b/providers/src/airflow/providers/opensearch/hooks/__init__.py similarity index 100% rename from airflow/providers/opensearch/hooks/__init__.py rename to providers/src/airflow/providers/opensearch/hooks/__init__.py diff --git a/airflow/providers/opensearch/hooks/opensearch.py b/providers/src/airflow/providers/opensearch/hooks/opensearch.py similarity index 100% rename from airflow/providers/opensearch/hooks/opensearch.py rename to providers/src/airflow/providers/opensearch/hooks/opensearch.py diff --git a/airflow/providers/opensearch/log/__init__.py b/providers/src/airflow/providers/opensearch/log/__init__.py similarity index 100% rename from airflow/providers/opensearch/log/__init__.py rename to providers/src/airflow/providers/opensearch/log/__init__.py diff --git a/airflow/providers/opensearch/log/os_json_formatter.py b/providers/src/airflow/providers/opensearch/log/os_json_formatter.py similarity index 100% rename from airflow/providers/opensearch/log/os_json_formatter.py rename to providers/src/airflow/providers/opensearch/log/os_json_formatter.py diff --git a/airflow/providers/opensearch/log/os_response.py b/providers/src/airflow/providers/opensearch/log/os_response.py similarity index 100% rename from airflow/providers/opensearch/log/os_response.py rename to providers/src/airflow/providers/opensearch/log/os_response.py diff --git 
a/airflow/providers/opensearch/log/os_task_handler.py b/providers/src/airflow/providers/opensearch/log/os_task_handler.py similarity index 100% rename from airflow/providers/opensearch/log/os_task_handler.py rename to providers/src/airflow/providers/opensearch/log/os_task_handler.py diff --git a/airflow/providers/opensearch/operators/__init__.py b/providers/src/airflow/providers/opensearch/operators/__init__.py similarity index 100% rename from airflow/providers/opensearch/operators/__init__.py rename to providers/src/airflow/providers/opensearch/operators/__init__.py diff --git a/airflow/providers/opensearch/operators/opensearch.py b/providers/src/airflow/providers/opensearch/operators/opensearch.py similarity index 100% rename from airflow/providers/opensearch/operators/opensearch.py rename to providers/src/airflow/providers/opensearch/operators/opensearch.py diff --git a/airflow/providers/opensearch/provider.yaml b/providers/src/airflow/providers/opensearch/provider.yaml similarity index 100% rename from airflow/providers/opensearch/provider.yaml rename to providers/src/airflow/providers/opensearch/provider.yaml diff --git a/airflow/providers/opsgenie/.latest-doc-only-change.txt b/providers/src/airflow/providers/opsgenie/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/opsgenie/.latest-doc-only-change.txt rename to providers/src/airflow/providers/opsgenie/.latest-doc-only-change.txt diff --git a/airflow/providers/opsgenie/CHANGELOG.rst b/providers/src/airflow/providers/opsgenie/CHANGELOG.rst similarity index 100% rename from airflow/providers/opsgenie/CHANGELOG.rst rename to providers/src/airflow/providers/opsgenie/CHANGELOG.rst diff --git a/airflow/providers/opsgenie/__init__.py b/providers/src/airflow/providers/opsgenie/__init__.py similarity index 100% rename from airflow/providers/opsgenie/__init__.py rename to providers/src/airflow/providers/opsgenie/__init__.py diff --git a/airflow/providers/neo4j/operators/__init__.py b/providers/src/airflow/providers/opsgenie/hooks/__init__.py similarity index 100% rename from airflow/providers/neo4j/operators/__init__.py rename to providers/src/airflow/providers/opsgenie/hooks/__init__.py diff --git a/airflow/providers/opsgenie/hooks/opsgenie.py b/providers/src/airflow/providers/opsgenie/hooks/opsgenie.py similarity index 100% rename from airflow/providers/opsgenie/hooks/opsgenie.py rename to providers/src/airflow/providers/opsgenie/hooks/opsgenie.py diff --git a/airflow/providers/opsgenie/notifications/__init__.py b/providers/src/airflow/providers/opsgenie/notifications/__init__.py similarity index 100% rename from airflow/providers/opsgenie/notifications/__init__.py rename to providers/src/airflow/providers/opsgenie/notifications/__init__.py diff --git a/airflow/providers/opsgenie/notifications/opsgenie.py b/providers/src/airflow/providers/opsgenie/notifications/opsgenie.py similarity index 100% rename from airflow/providers/opsgenie/notifications/opsgenie.py rename to providers/src/airflow/providers/opsgenie/notifications/opsgenie.py diff --git a/airflow/providers/openfaas/hooks/__init__.py b/providers/src/airflow/providers/opsgenie/operators/__init__.py similarity index 100% rename from airflow/providers/openfaas/hooks/__init__.py rename to providers/src/airflow/providers/opsgenie/operators/__init__.py diff --git a/airflow/providers/opsgenie/operators/opsgenie.py b/providers/src/airflow/providers/opsgenie/operators/opsgenie.py similarity index 100% rename from airflow/providers/opsgenie/operators/opsgenie.py 
rename to providers/src/airflow/providers/opsgenie/operators/opsgenie.py diff --git a/airflow/providers/opsgenie/provider.yaml b/providers/src/airflow/providers/opsgenie/provider.yaml similarity index 100% rename from airflow/providers/opsgenie/provider.yaml rename to providers/src/airflow/providers/opsgenie/provider.yaml diff --git a/airflow/providers/opsgenie/typing/__init__.py b/providers/src/airflow/providers/opsgenie/typing/__init__.py similarity index 100% rename from airflow/providers/opsgenie/typing/__init__.py rename to providers/src/airflow/providers/opsgenie/typing/__init__.py diff --git a/airflow/providers/opsgenie/typing/opsgenie.py b/providers/src/airflow/providers/opsgenie/typing/opsgenie.py similarity index 100% rename from airflow/providers/opsgenie/typing/opsgenie.py rename to providers/src/airflow/providers/opsgenie/typing/opsgenie.py diff --git a/airflow/providers/oracle/.latest-doc-only-change.txt b/providers/src/airflow/providers/oracle/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/oracle/.latest-doc-only-change.txt rename to providers/src/airflow/providers/oracle/.latest-doc-only-change.txt diff --git a/airflow/providers/oracle/CHANGELOG.rst b/providers/src/airflow/providers/oracle/CHANGELOG.rst similarity index 100% rename from airflow/providers/oracle/CHANGELOG.rst rename to providers/src/airflow/providers/oracle/CHANGELOG.rst diff --git a/airflow/providers/oracle/__init__.py b/providers/src/airflow/providers/oracle/__init__.py similarity index 100% rename from airflow/providers/oracle/__init__.py rename to providers/src/airflow/providers/oracle/__init__.py diff --git a/airflow/providers/oracle/example_dags/__init__.py b/providers/src/airflow/providers/oracle/example_dags/__init__.py similarity index 100% rename from airflow/providers/oracle/example_dags/__init__.py rename to providers/src/airflow/providers/oracle/example_dags/__init__.py diff --git a/airflow/providers/oracle/example_dags/example_oracle.py b/providers/src/airflow/providers/oracle/example_dags/example_oracle.py similarity index 100% rename from airflow/providers/oracle/example_dags/example_oracle.py rename to providers/src/airflow/providers/oracle/example_dags/example_oracle.py diff --git a/airflow/providers/opsgenie/hooks/__init__.py b/providers/src/airflow/providers/oracle/hooks/__init__.py similarity index 100% rename from airflow/providers/opsgenie/hooks/__init__.py rename to providers/src/airflow/providers/oracle/hooks/__init__.py diff --git a/airflow/providers/oracle/hooks/oracle.py b/providers/src/airflow/providers/oracle/hooks/oracle.py similarity index 100% rename from airflow/providers/oracle/hooks/oracle.py rename to providers/src/airflow/providers/oracle/hooks/oracle.py diff --git a/airflow/providers/opsgenie/operators/__init__.py b/providers/src/airflow/providers/oracle/operators/__init__.py similarity index 100% rename from airflow/providers/opsgenie/operators/__init__.py rename to providers/src/airflow/providers/oracle/operators/__init__.py diff --git a/airflow/providers/oracle/operators/oracle.py b/providers/src/airflow/providers/oracle/operators/oracle.py similarity index 100% rename from airflow/providers/oracle/operators/oracle.py rename to providers/src/airflow/providers/oracle/operators/oracle.py diff --git a/airflow/providers/oracle/provider.yaml b/providers/src/airflow/providers/oracle/provider.yaml similarity index 100% rename from airflow/providers/oracle/provider.yaml rename to providers/src/airflow/providers/oracle/provider.yaml diff --git 
a/airflow/providers/oracle/transfers/__init__.py b/providers/src/airflow/providers/oracle/transfers/__init__.py similarity index 100% rename from airflow/providers/oracle/transfers/__init__.py rename to providers/src/airflow/providers/oracle/transfers/__init__.py diff --git a/airflow/providers/oracle/transfers/oracle_to_oracle.py b/providers/src/airflow/providers/oracle/transfers/oracle_to_oracle.py similarity index 100% rename from airflow/providers/oracle/transfers/oracle_to_oracle.py rename to providers/src/airflow/providers/oracle/transfers/oracle_to_oracle.py diff --git a/airflow/providers/pagerduty/.latest-doc-only-change.txt b/providers/src/airflow/providers/pagerduty/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/pagerduty/.latest-doc-only-change.txt rename to providers/src/airflow/providers/pagerduty/.latest-doc-only-change.txt diff --git a/airflow/providers/pagerduty/CHANGELOG.rst b/providers/src/airflow/providers/pagerduty/CHANGELOG.rst similarity index 100% rename from airflow/providers/pagerduty/CHANGELOG.rst rename to providers/src/airflow/providers/pagerduty/CHANGELOG.rst diff --git a/airflow/providers/pagerduty/__init__.py b/providers/src/airflow/providers/pagerduty/__init__.py similarity index 100% rename from airflow/providers/pagerduty/__init__.py rename to providers/src/airflow/providers/pagerduty/__init__.py diff --git a/airflow/providers/oracle/hooks/__init__.py b/providers/src/airflow/providers/pagerduty/hooks/__init__.py similarity index 100% rename from airflow/providers/oracle/hooks/__init__.py rename to providers/src/airflow/providers/pagerduty/hooks/__init__.py diff --git a/airflow/providers/pagerduty/hooks/pagerduty.py b/providers/src/airflow/providers/pagerduty/hooks/pagerduty.py similarity index 100% rename from airflow/providers/pagerduty/hooks/pagerduty.py rename to providers/src/airflow/providers/pagerduty/hooks/pagerduty.py diff --git a/airflow/providers/pagerduty/hooks/pagerduty_events.py b/providers/src/airflow/providers/pagerduty/hooks/pagerduty_events.py similarity index 100% rename from airflow/providers/pagerduty/hooks/pagerduty_events.py rename to providers/src/airflow/providers/pagerduty/hooks/pagerduty_events.py diff --git a/airflow/providers/pagerduty/notifications/__init__.py b/providers/src/airflow/providers/pagerduty/notifications/__init__.py similarity index 100% rename from airflow/providers/pagerduty/notifications/__init__.py rename to providers/src/airflow/providers/pagerduty/notifications/__init__.py diff --git a/airflow/providers/pagerduty/notifications/pagerduty.py b/providers/src/airflow/providers/pagerduty/notifications/pagerduty.py similarity index 100% rename from airflow/providers/pagerduty/notifications/pagerduty.py rename to providers/src/airflow/providers/pagerduty/notifications/pagerduty.py diff --git a/airflow/providers/pagerduty/provider.yaml b/providers/src/airflow/providers/pagerduty/provider.yaml similarity index 100% rename from airflow/providers/pagerduty/provider.yaml rename to providers/src/airflow/providers/pagerduty/provider.yaml diff --git a/airflow/providers/papermill/.latest-doc-only-change.txt b/providers/src/airflow/providers/papermill/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/papermill/.latest-doc-only-change.txt rename to providers/src/airflow/providers/papermill/.latest-doc-only-change.txt diff --git a/airflow/providers/papermill/CHANGELOG.rst b/providers/src/airflow/providers/papermill/CHANGELOG.rst similarity index 100% rename from 
airflow/providers/papermill/CHANGELOG.rst rename to providers/src/airflow/providers/papermill/CHANGELOG.rst diff --git a/airflow/providers/papermill/__init__.py b/providers/src/airflow/providers/papermill/__init__.py similarity index 100% rename from airflow/providers/papermill/__init__.py rename to providers/src/airflow/providers/papermill/__init__.py diff --git a/airflow/providers/oracle/operators/__init__.py b/providers/src/airflow/providers/papermill/hooks/__init__.py similarity index 100% rename from airflow/providers/oracle/operators/__init__.py rename to providers/src/airflow/providers/papermill/hooks/__init__.py diff --git a/airflow/providers/papermill/hooks/kernel.py b/providers/src/airflow/providers/papermill/hooks/kernel.py similarity index 100% rename from airflow/providers/papermill/hooks/kernel.py rename to providers/src/airflow/providers/papermill/hooks/kernel.py diff --git a/airflow/providers/pagerduty/hooks/__init__.py b/providers/src/airflow/providers/papermill/operators/__init__.py similarity index 100% rename from airflow/providers/pagerduty/hooks/__init__.py rename to providers/src/airflow/providers/papermill/operators/__init__.py diff --git a/airflow/providers/papermill/operators/papermill.py b/providers/src/airflow/providers/papermill/operators/papermill.py similarity index 100% rename from airflow/providers/papermill/operators/papermill.py rename to providers/src/airflow/providers/papermill/operators/papermill.py diff --git a/airflow/providers/papermill/provider.yaml b/providers/src/airflow/providers/papermill/provider.yaml similarity index 100% rename from airflow/providers/papermill/provider.yaml rename to providers/src/airflow/providers/papermill/provider.yaml diff --git a/airflow/providers/pgvector/.latest-doc-only-change.txt b/providers/src/airflow/providers/pgvector/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/pgvector/.latest-doc-only-change.txt rename to providers/src/airflow/providers/pgvector/.latest-doc-only-change.txt diff --git a/airflow/providers/pgvector/CHANGELOG.rst b/providers/src/airflow/providers/pgvector/CHANGELOG.rst similarity index 100% rename from airflow/providers/pgvector/CHANGELOG.rst rename to providers/src/airflow/providers/pgvector/CHANGELOG.rst diff --git a/airflow/providers/pgvector/__init__.py b/providers/src/airflow/providers/pgvector/__init__.py similarity index 100% rename from airflow/providers/pgvector/__init__.py rename to providers/src/airflow/providers/pgvector/__init__.py diff --git a/airflow/providers/pgvector/hooks/__init__.py b/providers/src/airflow/providers/pgvector/hooks/__init__.py similarity index 100% rename from airflow/providers/pgvector/hooks/__init__.py rename to providers/src/airflow/providers/pgvector/hooks/__init__.py diff --git a/airflow/providers/pgvector/hooks/pgvector.py b/providers/src/airflow/providers/pgvector/hooks/pgvector.py similarity index 100% rename from airflow/providers/pgvector/hooks/pgvector.py rename to providers/src/airflow/providers/pgvector/hooks/pgvector.py diff --git a/airflow/providers/pgvector/operators/__init__.py b/providers/src/airflow/providers/pgvector/operators/__init__.py similarity index 100% rename from airflow/providers/pgvector/operators/__init__.py rename to providers/src/airflow/providers/pgvector/operators/__init__.py diff --git a/airflow/providers/pgvector/operators/pgvector.py b/providers/src/airflow/providers/pgvector/operators/pgvector.py similarity index 100% rename from airflow/providers/pgvector/operators/pgvector.py rename to 
providers/src/airflow/providers/pgvector/operators/pgvector.py diff --git a/airflow/providers/pgvector/provider.yaml b/providers/src/airflow/providers/pgvector/provider.yaml similarity index 100% rename from airflow/providers/pgvector/provider.yaml rename to providers/src/airflow/providers/pgvector/provider.yaml diff --git a/airflow/providers/pinecone/.latest-doc-only-change.txt b/providers/src/airflow/providers/pinecone/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/pinecone/.latest-doc-only-change.txt rename to providers/src/airflow/providers/pinecone/.latest-doc-only-change.txt diff --git a/airflow/providers/pinecone/CHANGELOG.rst b/providers/src/airflow/providers/pinecone/CHANGELOG.rst similarity index 100% rename from airflow/providers/pinecone/CHANGELOG.rst rename to providers/src/airflow/providers/pinecone/CHANGELOG.rst diff --git a/airflow/providers/pinecone/__init__.py b/providers/src/airflow/providers/pinecone/__init__.py similarity index 100% rename from airflow/providers/pinecone/__init__.py rename to providers/src/airflow/providers/pinecone/__init__.py diff --git a/airflow/providers/pinecone/hooks/__init__.py b/providers/src/airflow/providers/pinecone/hooks/__init__.py similarity index 100% rename from airflow/providers/pinecone/hooks/__init__.py rename to providers/src/airflow/providers/pinecone/hooks/__init__.py diff --git a/airflow/providers/pinecone/hooks/pinecone.py b/providers/src/airflow/providers/pinecone/hooks/pinecone.py similarity index 100% rename from airflow/providers/pinecone/hooks/pinecone.py rename to providers/src/airflow/providers/pinecone/hooks/pinecone.py diff --git a/airflow/providers/pinecone/operators/__init__.py b/providers/src/airflow/providers/pinecone/operators/__init__.py similarity index 100% rename from airflow/providers/pinecone/operators/__init__.py rename to providers/src/airflow/providers/pinecone/operators/__init__.py diff --git a/airflow/providers/pinecone/operators/pinecone.py b/providers/src/airflow/providers/pinecone/operators/pinecone.py similarity index 100% rename from airflow/providers/pinecone/operators/pinecone.py rename to providers/src/airflow/providers/pinecone/operators/pinecone.py diff --git a/airflow/providers/pinecone/provider.yaml b/providers/src/airflow/providers/pinecone/provider.yaml similarity index 100% rename from airflow/providers/pinecone/provider.yaml rename to providers/src/airflow/providers/pinecone/provider.yaml diff --git a/airflow/providers/postgres/.latest-doc-only-change.txt b/providers/src/airflow/providers/postgres/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/postgres/.latest-doc-only-change.txt rename to providers/src/airflow/providers/postgres/.latest-doc-only-change.txt diff --git a/airflow/providers/postgres/CHANGELOG.rst b/providers/src/airflow/providers/postgres/CHANGELOG.rst similarity index 100% rename from airflow/providers/postgres/CHANGELOG.rst rename to providers/src/airflow/providers/postgres/CHANGELOG.rst diff --git a/airflow/providers/postgres/__init__.py b/providers/src/airflow/providers/postgres/__init__.py similarity index 100% rename from airflow/providers/postgres/__init__.py rename to providers/src/airflow/providers/postgres/__init__.py diff --git a/airflow/providers/postgres/assets/__init__.py b/providers/src/airflow/providers/postgres/assets/__init__.py similarity index 100% rename from airflow/providers/postgres/assets/__init__.py rename to providers/src/airflow/providers/postgres/assets/__init__.py diff --git 
a/airflow/providers/postgres/assets/postgres.py b/providers/src/airflow/providers/postgres/assets/postgres.py similarity index 100% rename from airflow/providers/postgres/assets/postgres.py rename to providers/src/airflow/providers/postgres/assets/postgres.py diff --git a/airflow/providers/papermill/hooks/__init__.py b/providers/src/airflow/providers/postgres/hooks/__init__.py similarity index 100% rename from airflow/providers/papermill/hooks/__init__.py rename to providers/src/airflow/providers/postgres/hooks/__init__.py diff --git a/airflow/providers/postgres/hooks/postgres.py b/providers/src/airflow/providers/postgres/hooks/postgres.py similarity index 100% rename from airflow/providers/postgres/hooks/postgres.py rename to providers/src/airflow/providers/postgres/hooks/postgres.py diff --git a/airflow/providers/papermill/operators/__init__.py b/providers/src/airflow/providers/postgres/operators/__init__.py similarity index 100% rename from airflow/providers/papermill/operators/__init__.py rename to providers/src/airflow/providers/postgres/operators/__init__.py diff --git a/airflow/providers/postgres/operators/postgres.py b/providers/src/airflow/providers/postgres/operators/postgres.py similarity index 100% rename from airflow/providers/postgres/operators/postgres.py rename to providers/src/airflow/providers/postgres/operators/postgres.py diff --git a/airflow/providers/postgres/provider.yaml b/providers/src/airflow/providers/postgres/provider.yaml similarity index 100% rename from airflow/providers/postgres/provider.yaml rename to providers/src/airflow/providers/postgres/provider.yaml diff --git a/airflow/providers/presto/.latest-doc-only-change.txt b/providers/src/airflow/providers/presto/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/presto/.latest-doc-only-change.txt rename to providers/src/airflow/providers/presto/.latest-doc-only-change.txt diff --git a/airflow/providers/presto/CHANGELOG.rst b/providers/src/airflow/providers/presto/CHANGELOG.rst similarity index 100% rename from airflow/providers/presto/CHANGELOG.rst rename to providers/src/airflow/providers/presto/CHANGELOG.rst diff --git a/airflow/providers/presto/__init__.py b/providers/src/airflow/providers/presto/__init__.py similarity index 100% rename from airflow/providers/presto/__init__.py rename to providers/src/airflow/providers/presto/__init__.py diff --git a/airflow/providers/postgres/hooks/__init__.py b/providers/src/airflow/providers/presto/hooks/__init__.py similarity index 100% rename from airflow/providers/postgres/hooks/__init__.py rename to providers/src/airflow/providers/presto/hooks/__init__.py diff --git a/airflow/providers/presto/hooks/presto.py b/providers/src/airflow/providers/presto/hooks/presto.py similarity index 100% rename from airflow/providers/presto/hooks/presto.py rename to providers/src/airflow/providers/presto/hooks/presto.py diff --git a/airflow/providers/presto/provider.yaml b/providers/src/airflow/providers/presto/provider.yaml similarity index 100% rename from airflow/providers/presto/provider.yaml rename to providers/src/airflow/providers/presto/provider.yaml diff --git a/airflow/providers/presto/transfers/__init__.py b/providers/src/airflow/providers/presto/transfers/__init__.py similarity index 100% rename from airflow/providers/presto/transfers/__init__.py rename to providers/src/airflow/providers/presto/transfers/__init__.py diff --git a/airflow/providers/presto/transfers/gcs_to_presto.py 
b/providers/src/airflow/providers/presto/transfers/gcs_to_presto.py similarity index 100% rename from airflow/providers/presto/transfers/gcs_to_presto.py rename to providers/src/airflow/providers/presto/transfers/gcs_to_presto.py diff --git a/airflow/providers/qdrant/.latest-doc-only-change.txt b/providers/src/airflow/providers/qdrant/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/qdrant/.latest-doc-only-change.txt rename to providers/src/airflow/providers/qdrant/.latest-doc-only-change.txt diff --git a/airflow/providers/qdrant/CHANGELOG.rst b/providers/src/airflow/providers/qdrant/CHANGELOG.rst similarity index 100% rename from airflow/providers/qdrant/CHANGELOG.rst rename to providers/src/airflow/providers/qdrant/CHANGELOG.rst diff --git a/airflow/providers/qdrant/__init__.py b/providers/src/airflow/providers/qdrant/__init__.py similarity index 100% rename from airflow/providers/qdrant/__init__.py rename to providers/src/airflow/providers/qdrant/__init__.py diff --git a/airflow/providers/qdrant/hooks/__init__.py b/providers/src/airflow/providers/qdrant/hooks/__init__.py similarity index 100% rename from airflow/providers/qdrant/hooks/__init__.py rename to providers/src/airflow/providers/qdrant/hooks/__init__.py diff --git a/airflow/providers/qdrant/hooks/qdrant.py b/providers/src/airflow/providers/qdrant/hooks/qdrant.py similarity index 100% rename from airflow/providers/qdrant/hooks/qdrant.py rename to providers/src/airflow/providers/qdrant/hooks/qdrant.py diff --git a/airflow/providers/qdrant/operators/__init__.py b/providers/src/airflow/providers/qdrant/operators/__init__.py similarity index 100% rename from airflow/providers/qdrant/operators/__init__.py rename to providers/src/airflow/providers/qdrant/operators/__init__.py diff --git a/airflow/providers/qdrant/operators/qdrant.py b/providers/src/airflow/providers/qdrant/operators/qdrant.py similarity index 100% rename from airflow/providers/qdrant/operators/qdrant.py rename to providers/src/airflow/providers/qdrant/operators/qdrant.py diff --git a/airflow/providers/qdrant/provider.yaml b/providers/src/airflow/providers/qdrant/provider.yaml similarity index 100% rename from airflow/providers/qdrant/provider.yaml rename to providers/src/airflow/providers/qdrant/provider.yaml diff --git a/airflow/providers/redis/.latest-doc-only-change.txt b/providers/src/airflow/providers/redis/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/redis/.latest-doc-only-change.txt rename to providers/src/airflow/providers/redis/.latest-doc-only-change.txt diff --git a/airflow/providers/redis/CHANGELOG.rst b/providers/src/airflow/providers/redis/CHANGELOG.rst similarity index 100% rename from airflow/providers/redis/CHANGELOG.rst rename to providers/src/airflow/providers/redis/CHANGELOG.rst diff --git a/airflow/providers/redis/__init__.py b/providers/src/airflow/providers/redis/__init__.py similarity index 100% rename from airflow/providers/redis/__init__.py rename to providers/src/airflow/providers/redis/__init__.py diff --git a/airflow/providers/postgres/operators/__init__.py b/providers/src/airflow/providers/redis/hooks/__init__.py similarity index 100% rename from airflow/providers/postgres/operators/__init__.py rename to providers/src/airflow/providers/redis/hooks/__init__.py diff --git a/airflow/providers/redis/hooks/redis.py b/providers/src/airflow/providers/redis/hooks/redis.py similarity index 100% rename from airflow/providers/redis/hooks/redis.py rename to 
providers/src/airflow/providers/redis/hooks/redis.py diff --git a/airflow/providers/presto/hooks/__init__.py b/providers/src/airflow/providers/redis/log/__init__.py similarity index 100% rename from airflow/providers/presto/hooks/__init__.py rename to providers/src/airflow/providers/redis/log/__init__.py diff --git a/airflow/providers/redis/log/redis_task_handler.py b/providers/src/airflow/providers/redis/log/redis_task_handler.py similarity index 100% rename from airflow/providers/redis/log/redis_task_handler.py rename to providers/src/airflow/providers/redis/log/redis_task_handler.py diff --git a/airflow/providers/redis/hooks/__init__.py b/providers/src/airflow/providers/redis/operators/__init__.py similarity index 100% rename from airflow/providers/redis/hooks/__init__.py rename to providers/src/airflow/providers/redis/operators/__init__.py diff --git a/airflow/providers/redis/operators/redis_publish.py b/providers/src/airflow/providers/redis/operators/redis_publish.py similarity index 100% rename from airflow/providers/redis/operators/redis_publish.py rename to providers/src/airflow/providers/redis/operators/redis_publish.py diff --git a/airflow/providers/redis/provider.yaml b/providers/src/airflow/providers/redis/provider.yaml similarity index 100% rename from airflow/providers/redis/provider.yaml rename to providers/src/airflow/providers/redis/provider.yaml diff --git a/airflow/providers/redis/log/__init__.py b/providers/src/airflow/providers/redis/sensors/__init__.py similarity index 100% rename from airflow/providers/redis/log/__init__.py rename to providers/src/airflow/providers/redis/sensors/__init__.py diff --git a/airflow/providers/redis/sensors/redis_key.py b/providers/src/airflow/providers/redis/sensors/redis_key.py similarity index 100% rename from airflow/providers/redis/sensors/redis_key.py rename to providers/src/airflow/providers/redis/sensors/redis_key.py diff --git a/airflow/providers/redis/sensors/redis_pub_sub.py b/providers/src/airflow/providers/redis/sensors/redis_pub_sub.py similarity index 100% rename from airflow/providers/redis/sensors/redis_pub_sub.py rename to providers/src/airflow/providers/redis/sensors/redis_pub_sub.py diff --git a/airflow/providers/salesforce/.latest-doc-only-change.txt b/providers/src/airflow/providers/salesforce/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/salesforce/.latest-doc-only-change.txt rename to providers/src/airflow/providers/salesforce/.latest-doc-only-change.txt diff --git a/airflow/providers/salesforce/CHANGELOG.rst b/providers/src/airflow/providers/salesforce/CHANGELOG.rst similarity index 100% rename from airflow/providers/salesforce/CHANGELOG.rst rename to providers/src/airflow/providers/salesforce/CHANGELOG.rst diff --git a/airflow/providers/salesforce/__init__.py b/providers/src/airflow/providers/salesforce/__init__.py similarity index 100% rename from airflow/providers/salesforce/__init__.py rename to providers/src/airflow/providers/salesforce/__init__.py diff --git a/airflow/providers/redis/operators/__init__.py b/providers/src/airflow/providers/salesforce/hooks/__init__.py similarity index 100% rename from airflow/providers/redis/operators/__init__.py rename to providers/src/airflow/providers/salesforce/hooks/__init__.py diff --git a/airflow/providers/salesforce/hooks/salesforce.py b/providers/src/airflow/providers/salesforce/hooks/salesforce.py similarity index 100% rename from airflow/providers/salesforce/hooks/salesforce.py rename to 
providers/src/airflow/providers/salesforce/hooks/salesforce.py diff --git a/airflow/providers/salesforce/operators/__init__.py b/providers/src/airflow/providers/salesforce/operators/__init__.py similarity index 100% rename from airflow/providers/salesforce/operators/__init__.py rename to providers/src/airflow/providers/salesforce/operators/__init__.py diff --git a/airflow/providers/salesforce/operators/bulk.py b/providers/src/airflow/providers/salesforce/operators/bulk.py similarity index 100% rename from airflow/providers/salesforce/operators/bulk.py rename to providers/src/airflow/providers/salesforce/operators/bulk.py diff --git a/airflow/providers/salesforce/operators/salesforce_apex_rest.py b/providers/src/airflow/providers/salesforce/operators/salesforce_apex_rest.py similarity index 100% rename from airflow/providers/salesforce/operators/salesforce_apex_rest.py rename to providers/src/airflow/providers/salesforce/operators/salesforce_apex_rest.py diff --git a/airflow/providers/salesforce/provider.yaml b/providers/src/airflow/providers/salesforce/provider.yaml similarity index 100% rename from airflow/providers/salesforce/provider.yaml rename to providers/src/airflow/providers/salesforce/provider.yaml diff --git a/airflow/providers/samba/.latest-doc-only-change.txt b/providers/src/airflow/providers/samba/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/samba/.latest-doc-only-change.txt rename to providers/src/airflow/providers/samba/.latest-doc-only-change.txt diff --git a/airflow/providers/samba/CHANGELOG.rst b/providers/src/airflow/providers/samba/CHANGELOG.rst similarity index 100% rename from airflow/providers/samba/CHANGELOG.rst rename to providers/src/airflow/providers/samba/CHANGELOG.rst diff --git a/airflow/providers/samba/__init__.py b/providers/src/airflow/providers/samba/__init__.py similarity index 100% rename from airflow/providers/samba/__init__.py rename to providers/src/airflow/providers/samba/__init__.py diff --git a/airflow/providers/redis/sensors/__init__.py b/providers/src/airflow/providers/samba/hooks/__init__.py similarity index 100% rename from airflow/providers/redis/sensors/__init__.py rename to providers/src/airflow/providers/samba/hooks/__init__.py diff --git a/airflow/providers/samba/hooks/samba.py b/providers/src/airflow/providers/samba/hooks/samba.py similarity index 100% rename from airflow/providers/samba/hooks/samba.py rename to providers/src/airflow/providers/samba/hooks/samba.py diff --git a/airflow/providers/samba/provider.yaml b/providers/src/airflow/providers/samba/provider.yaml similarity index 100% rename from airflow/providers/samba/provider.yaml rename to providers/src/airflow/providers/samba/provider.yaml diff --git a/airflow/providers/samba/transfers/__init__.py b/providers/src/airflow/providers/samba/transfers/__init__.py similarity index 100% rename from airflow/providers/samba/transfers/__init__.py rename to providers/src/airflow/providers/samba/transfers/__init__.py diff --git a/airflow/providers/samba/transfers/gcs_to_samba.py b/providers/src/airflow/providers/samba/transfers/gcs_to_samba.py similarity index 100% rename from airflow/providers/samba/transfers/gcs_to_samba.py rename to providers/src/airflow/providers/samba/transfers/gcs_to_samba.py diff --git a/airflow/providers/segment/.latest-doc-only-change.txt b/providers/src/airflow/providers/segment/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/segment/.latest-doc-only-change.txt rename to 
providers/src/airflow/providers/segment/.latest-doc-only-change.txt diff --git a/airflow/providers/segment/CHANGELOG.rst b/providers/src/airflow/providers/segment/CHANGELOG.rst similarity index 100% rename from airflow/providers/segment/CHANGELOG.rst rename to providers/src/airflow/providers/segment/CHANGELOG.rst diff --git a/airflow/providers/segment/__init__.py b/providers/src/airflow/providers/segment/__init__.py similarity index 100% rename from airflow/providers/segment/__init__.py rename to providers/src/airflow/providers/segment/__init__.py diff --git a/airflow/providers/salesforce/hooks/__init__.py b/providers/src/airflow/providers/segment/hooks/__init__.py similarity index 100% rename from airflow/providers/salesforce/hooks/__init__.py rename to providers/src/airflow/providers/segment/hooks/__init__.py diff --git a/airflow/providers/segment/hooks/segment.py b/providers/src/airflow/providers/segment/hooks/segment.py similarity index 100% rename from airflow/providers/segment/hooks/segment.py rename to providers/src/airflow/providers/segment/hooks/segment.py diff --git a/airflow/providers/samba/hooks/__init__.py b/providers/src/airflow/providers/segment/operators/__init__.py similarity index 100% rename from airflow/providers/samba/hooks/__init__.py rename to providers/src/airflow/providers/segment/operators/__init__.py diff --git a/airflow/providers/segment/operators/segment_track_event.py b/providers/src/airflow/providers/segment/operators/segment_track_event.py similarity index 100% rename from airflow/providers/segment/operators/segment_track_event.py rename to providers/src/airflow/providers/segment/operators/segment_track_event.py diff --git a/airflow/providers/segment/provider.yaml b/providers/src/airflow/providers/segment/provider.yaml similarity index 100% rename from airflow/providers/segment/provider.yaml rename to providers/src/airflow/providers/segment/provider.yaml diff --git a/airflow/providers/sendgrid/.latest-doc-only-change.txt b/providers/src/airflow/providers/sendgrid/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/sendgrid/.latest-doc-only-change.txt rename to providers/src/airflow/providers/sendgrid/.latest-doc-only-change.txt diff --git a/airflow/providers/sendgrid/CHANGELOG.rst b/providers/src/airflow/providers/sendgrid/CHANGELOG.rst similarity index 100% rename from airflow/providers/sendgrid/CHANGELOG.rst rename to providers/src/airflow/providers/sendgrid/CHANGELOG.rst diff --git a/airflow/providers/sendgrid/__init__.py b/providers/src/airflow/providers/sendgrid/__init__.py similarity index 100% rename from airflow/providers/sendgrid/__init__.py rename to providers/src/airflow/providers/sendgrid/__init__.py diff --git a/airflow/providers/sendgrid/provider.yaml b/providers/src/airflow/providers/sendgrid/provider.yaml similarity index 100% rename from airflow/providers/sendgrid/provider.yaml rename to providers/src/airflow/providers/sendgrid/provider.yaml diff --git a/airflow/providers/sendgrid/utils/__init__.py b/providers/src/airflow/providers/sendgrid/utils/__init__.py similarity index 100% rename from airflow/providers/sendgrid/utils/__init__.py rename to providers/src/airflow/providers/sendgrid/utils/__init__.py diff --git a/airflow/providers/sendgrid/utils/emailer.py b/providers/src/airflow/providers/sendgrid/utils/emailer.py similarity index 100% rename from airflow/providers/sendgrid/utils/emailer.py rename to providers/src/airflow/providers/sendgrid/utils/emailer.py diff --git 
a/airflow/providers/sftp/.latest-doc-only-change.txt b/providers/src/airflow/providers/sftp/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/sftp/.latest-doc-only-change.txt rename to providers/src/airflow/providers/sftp/.latest-doc-only-change.txt diff --git a/airflow/providers/sftp/CHANGELOG.rst b/providers/src/airflow/providers/sftp/CHANGELOG.rst similarity index 100% rename from airflow/providers/sftp/CHANGELOG.rst rename to providers/src/airflow/providers/sftp/CHANGELOG.rst diff --git a/airflow/providers/sftp/__init__.py b/providers/src/airflow/providers/sftp/__init__.py similarity index 100% rename from airflow/providers/sftp/__init__.py rename to providers/src/airflow/providers/sftp/__init__.py diff --git a/airflow/providers/sftp/decorators/__init__.py b/providers/src/airflow/providers/sftp/decorators/__init__.py similarity index 100% rename from airflow/providers/sftp/decorators/__init__.py rename to providers/src/airflow/providers/sftp/decorators/__init__.py diff --git a/airflow/providers/sftp/decorators/sensors/__init__.py b/providers/src/airflow/providers/sftp/decorators/sensors/__init__.py similarity index 100% rename from airflow/providers/sftp/decorators/sensors/__init__.py rename to providers/src/airflow/providers/sftp/decorators/sensors/__init__.py diff --git a/airflow/providers/sftp/decorators/sensors/sftp.py b/providers/src/airflow/providers/sftp/decorators/sensors/sftp.py similarity index 100% rename from airflow/providers/sftp/decorators/sensors/sftp.py rename to providers/src/airflow/providers/sftp/decorators/sensors/sftp.py diff --git a/airflow/providers/sftp/hooks/__init__.py b/providers/src/airflow/providers/sftp/hooks/__init__.py similarity index 100% rename from airflow/providers/sftp/hooks/__init__.py rename to providers/src/airflow/providers/sftp/hooks/__init__.py diff --git a/airflow/providers/sftp/hooks/sftp.py b/providers/src/airflow/providers/sftp/hooks/sftp.py similarity index 100% rename from airflow/providers/sftp/hooks/sftp.py rename to providers/src/airflow/providers/sftp/hooks/sftp.py diff --git a/airflow/providers/sftp/operators/__init__.py b/providers/src/airflow/providers/sftp/operators/__init__.py similarity index 100% rename from airflow/providers/sftp/operators/__init__.py rename to providers/src/airflow/providers/sftp/operators/__init__.py diff --git a/airflow/providers/sftp/operators/sftp.py b/providers/src/airflow/providers/sftp/operators/sftp.py similarity index 100% rename from airflow/providers/sftp/operators/sftp.py rename to providers/src/airflow/providers/sftp/operators/sftp.py diff --git a/airflow/providers/sftp/provider.yaml b/providers/src/airflow/providers/sftp/provider.yaml similarity index 100% rename from airflow/providers/sftp/provider.yaml rename to providers/src/airflow/providers/sftp/provider.yaml diff --git a/airflow/providers/sftp/sensors/__init__.py b/providers/src/airflow/providers/sftp/sensors/__init__.py similarity index 100% rename from airflow/providers/sftp/sensors/__init__.py rename to providers/src/airflow/providers/sftp/sensors/__init__.py diff --git a/airflow/providers/sftp/sensors/sftp.py b/providers/src/airflow/providers/sftp/sensors/sftp.py similarity index 100% rename from airflow/providers/sftp/sensors/sftp.py rename to providers/src/airflow/providers/sftp/sensors/sftp.py diff --git a/airflow/providers/sftp/triggers/__init__.py b/providers/src/airflow/providers/sftp/triggers/__init__.py similarity index 100% rename from airflow/providers/sftp/triggers/__init__.py rename to 
providers/src/airflow/providers/sftp/triggers/__init__.py diff --git a/airflow/providers/sftp/triggers/sftp.py b/providers/src/airflow/providers/sftp/triggers/sftp.py similarity index 100% rename from airflow/providers/sftp/triggers/sftp.py rename to providers/src/airflow/providers/sftp/triggers/sftp.py diff --git a/airflow/providers/singularity/.latest-doc-only-change.txt b/providers/src/airflow/providers/singularity/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/singularity/.latest-doc-only-change.txt rename to providers/src/airflow/providers/singularity/.latest-doc-only-change.txt diff --git a/airflow/providers/singularity/CHANGELOG.rst b/providers/src/airflow/providers/singularity/CHANGELOG.rst similarity index 100% rename from airflow/providers/singularity/CHANGELOG.rst rename to providers/src/airflow/providers/singularity/CHANGELOG.rst diff --git a/airflow/providers/singularity/__init__.py b/providers/src/airflow/providers/singularity/__init__.py similarity index 100% rename from airflow/providers/singularity/__init__.py rename to providers/src/airflow/providers/singularity/__init__.py diff --git a/airflow/providers/segment/hooks/__init__.py b/providers/src/airflow/providers/singularity/operators/__init__.py similarity index 100% rename from airflow/providers/segment/hooks/__init__.py rename to providers/src/airflow/providers/singularity/operators/__init__.py diff --git a/airflow/providers/singularity/operators/singularity.py b/providers/src/airflow/providers/singularity/operators/singularity.py similarity index 100% rename from airflow/providers/singularity/operators/singularity.py rename to providers/src/airflow/providers/singularity/operators/singularity.py diff --git a/airflow/providers/singularity/provider.yaml b/providers/src/airflow/providers/singularity/provider.yaml similarity index 100% rename from airflow/providers/singularity/provider.yaml rename to providers/src/airflow/providers/singularity/provider.yaml diff --git a/airflow/providers/slack/.latest-doc-only-change.txt b/providers/src/airflow/providers/slack/.latest-doc-only-change.txt similarity index 100% rename from airflow/providers/slack/.latest-doc-only-change.txt rename to providers/src/airflow/providers/slack/.latest-doc-only-change.txt diff --git a/airflow/providers/slack/CHANGELOG.rst b/providers/src/airflow/providers/slack/CHANGELOG.rst similarity index 100% rename from airflow/providers/slack/CHANGELOG.rst rename to providers/src/airflow/providers/slack/CHANGELOG.rst diff --git a/airflow/providers/slack/__init__.py b/providers/src/airflow/providers/slack/__init__.py similarity index 100% rename from airflow/providers/slack/__init__.py rename to providers/src/airflow/providers/slack/__init__.py diff --git a/airflow/providers/segment/operators/__init__.py b/providers/src/airflow/providers/slack/hooks/__init__.py similarity index 100% rename from airflow/providers/segment/operators/__init__.py rename to providers/src/airflow/providers/slack/hooks/__init__.py diff --git a/airflow/providers/slack/hooks/slack.py b/providers/src/airflow/providers/slack/hooks/slack.py similarity index 100% rename from airflow/providers/slack/hooks/slack.py rename to providers/src/airflow/providers/slack/hooks/slack.py diff --git a/airflow/providers/slack/hooks/slack_webhook.py b/providers/src/airflow/providers/slack/hooks/slack_webhook.py similarity index 100% rename from airflow/providers/slack/hooks/slack_webhook.py rename to providers/src/airflow/providers/slack/hooks/slack_webhook.py diff --git 
a/airflow/providers/slack/notifications/__init__.py b/providers/src/airflow/providers/slack/notifications/__init__.py
similarity index 100%
rename from airflow/providers/slack/notifications/__init__.py
rename to providers/src/airflow/providers/slack/notifications/__init__.py
diff --git a/airflow/providers/slack/notifications/slack.py b/providers/src/airflow/providers/slack/notifications/slack.py
similarity index 100%
rename from airflow/providers/slack/notifications/slack.py
rename to providers/src/airflow/providers/slack/notifications/slack.py
diff --git a/airflow/providers/slack/notifications/slack_notifier.py b/providers/src/airflow/providers/slack/notifications/slack_notifier.py
similarity index 100%
rename from airflow/providers/slack/notifications/slack_notifier.py
rename to providers/src/airflow/providers/slack/notifications/slack_notifier.py
diff --git a/airflow/providers/slack/notifications/slack_webhook.py b/providers/src/airflow/providers/slack/notifications/slack_webhook.py
similarity index 100%
rename from airflow/providers/slack/notifications/slack_webhook.py
rename to providers/src/airflow/providers/slack/notifications/slack_webhook.py
diff --git a/airflow/providers/singularity/operators/__init__.py b/providers/src/airflow/providers/slack/operators/__init__.py
similarity index 100%
rename from airflow/providers/singularity/operators/__init__.py
rename to providers/src/airflow/providers/slack/operators/__init__.py
diff --git a/airflow/providers/slack/operators/slack.py b/providers/src/airflow/providers/slack/operators/slack.py
similarity index 100%
rename from airflow/providers/slack/operators/slack.py
rename to providers/src/airflow/providers/slack/operators/slack.py
diff --git a/airflow/providers/slack/operators/slack_webhook.py b/providers/src/airflow/providers/slack/operators/slack_webhook.py
similarity index 100%
rename from airflow/providers/slack/operators/slack_webhook.py
rename to providers/src/airflow/providers/slack/operators/slack_webhook.py
diff --git a/airflow/providers/slack/provider.yaml b/providers/src/airflow/providers/slack/provider.yaml
similarity index 100%
rename from airflow/providers/slack/provider.yaml
rename to providers/src/airflow/providers/slack/provider.yaml
diff --git a/airflow/providers/slack/transfers/__init__.py b/providers/src/airflow/providers/slack/transfers/__init__.py
similarity index 100%
rename from airflow/providers/slack/transfers/__init__.py
rename to providers/src/airflow/providers/slack/transfers/__init__.py
diff --git a/airflow/providers/slack/transfers/base_sql_to_slack.py b/providers/src/airflow/providers/slack/transfers/base_sql_to_slack.py
similarity index 100%
rename from airflow/providers/slack/transfers/base_sql_to_slack.py
rename to providers/src/airflow/providers/slack/transfers/base_sql_to_slack.py
diff --git a/airflow/providers/slack/transfers/sql_to_slack.py b/providers/src/airflow/providers/slack/transfers/sql_to_slack.py
similarity index 100%
rename from airflow/providers/slack/transfers/sql_to_slack.py
rename to providers/src/airflow/providers/slack/transfers/sql_to_slack.py
diff --git a/airflow/providers/slack/transfers/sql_to_slack_webhook.py b/providers/src/airflow/providers/slack/transfers/sql_to_slack_webhook.py
similarity index 100%
rename from airflow/providers/slack/transfers/sql_to_slack_webhook.py
rename to providers/src/airflow/providers/slack/transfers/sql_to_slack_webhook.py
diff --git a/airflow/providers/slack/utils/__init__.py b/providers/src/airflow/providers/slack/utils/__init__.py
similarity index 100%
rename from airflow/providers/slack/utils/__init__.py
rename to providers/src/airflow/providers/slack/utils/__init__.py
diff --git a/airflow/providers/smtp/.latest-doc-only-change.txt b/providers/src/airflow/providers/smtp/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/smtp/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/smtp/.latest-doc-only-change.txt
diff --git a/airflow/providers/smtp/CHANGELOG.rst b/providers/src/airflow/providers/smtp/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/smtp/CHANGELOG.rst
rename to providers/src/airflow/providers/smtp/CHANGELOG.rst
diff --git a/airflow/providers/smtp/__init__.py b/providers/src/airflow/providers/smtp/__init__.py
similarity index 100%
rename from airflow/providers/smtp/__init__.py
rename to providers/src/airflow/providers/smtp/__init__.py
diff --git a/airflow/providers/slack/hooks/__init__.py b/providers/src/airflow/providers/smtp/hooks/__init__.py
similarity index 100%
rename from airflow/providers/slack/hooks/__init__.py
rename to providers/src/airflow/providers/smtp/hooks/__init__.py
diff --git a/airflow/providers/smtp/hooks/smtp.py b/providers/src/airflow/providers/smtp/hooks/smtp.py
similarity index 100%
rename from airflow/providers/smtp/hooks/smtp.py
rename to providers/src/airflow/providers/smtp/hooks/smtp.py
diff --git a/airflow/providers/smtp/notifications/__init__.py b/providers/src/airflow/providers/smtp/notifications/__init__.py
similarity index 100%
rename from airflow/providers/smtp/notifications/__init__.py
rename to providers/src/airflow/providers/smtp/notifications/__init__.py
diff --git a/airflow/providers/smtp/notifications/smtp.py b/providers/src/airflow/providers/smtp/notifications/smtp.py
similarity index 100%
rename from airflow/providers/smtp/notifications/smtp.py
rename to providers/src/airflow/providers/smtp/notifications/smtp.py
diff --git a/airflow/providers/smtp/notifications/templates/__init__.py b/providers/src/airflow/providers/smtp/notifications/templates/__init__.py
similarity index 100%
rename from airflow/providers/smtp/notifications/templates/__init__.py
rename to providers/src/airflow/providers/smtp/notifications/templates/__init__.py
diff --git a/airflow/providers/smtp/notifications/templates/email.html b/providers/src/airflow/providers/smtp/notifications/templates/email.html
similarity index 100%
rename from airflow/providers/smtp/notifications/templates/email.html
rename to providers/src/airflow/providers/smtp/notifications/templates/email.html
diff --git a/airflow/providers/smtp/notifications/templates/email_subject.jinja2 b/providers/src/airflow/providers/smtp/notifications/templates/email_subject.jinja2
similarity index 100%
rename from airflow/providers/smtp/notifications/templates/email_subject.jinja2
rename to providers/src/airflow/providers/smtp/notifications/templates/email_subject.jinja2
diff --git a/airflow/providers/slack/operators/__init__.py b/providers/src/airflow/providers/smtp/operators/__init__.py
similarity index 100%
rename from airflow/providers/slack/operators/__init__.py
rename to providers/src/airflow/providers/smtp/operators/__init__.py
diff --git a/airflow/providers/smtp/operators/smtp.py b/providers/src/airflow/providers/smtp/operators/smtp.py
similarity index 100%
rename from airflow/providers/smtp/operators/smtp.py
rename to providers/src/airflow/providers/smtp/operators/smtp.py
diff --git a/airflow/providers/smtp/provider.yaml b/providers/src/airflow/providers/smtp/provider.yaml
similarity index 100%
rename from airflow/providers/smtp/provider.yaml
rename to providers/src/airflow/providers/smtp/provider.yaml
diff --git a/airflow/providers/snowflake/.latest-doc-only-change.txt b/providers/src/airflow/providers/snowflake/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/snowflake/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/snowflake/.latest-doc-only-change.txt
diff --git a/airflow/providers/snowflake/CHANGELOG.rst b/providers/src/airflow/providers/snowflake/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/snowflake/CHANGELOG.rst
rename to providers/src/airflow/providers/snowflake/CHANGELOG.rst
diff --git a/airflow/providers/snowflake/__init__.py b/providers/src/airflow/providers/snowflake/__init__.py
similarity index 100%
rename from airflow/providers/snowflake/__init__.py
rename to providers/src/airflow/providers/snowflake/__init__.py
diff --git a/airflow/providers/snowflake/decorators/__init__.py b/providers/src/airflow/providers/snowflake/decorators/__init__.py
similarity index 100%
rename from airflow/providers/snowflake/decorators/__init__.py
rename to providers/src/airflow/providers/snowflake/decorators/__init__.py
diff --git a/airflow/providers/snowflake/decorators/snowpark.py b/providers/src/airflow/providers/snowflake/decorators/snowpark.py
similarity index 100%
rename from airflow/providers/snowflake/decorators/snowpark.py
rename to providers/src/airflow/providers/snowflake/decorators/snowpark.py
diff --git a/airflow/providers/snowflake/hooks/__init__.py b/providers/src/airflow/providers/snowflake/hooks/__init__.py
similarity index 100%
rename from airflow/providers/snowflake/hooks/__init__.py
rename to providers/src/airflow/providers/snowflake/hooks/__init__.py
diff --git a/airflow/providers/snowflake/hooks/snowflake.py b/providers/src/airflow/providers/snowflake/hooks/snowflake.py
similarity index 100%
rename from airflow/providers/snowflake/hooks/snowflake.py
rename to providers/src/airflow/providers/snowflake/hooks/snowflake.py
diff --git a/airflow/providers/snowflake/hooks/snowflake_sql_api.py b/providers/src/airflow/providers/snowflake/hooks/snowflake_sql_api.py
similarity index 99%
rename from airflow/providers/snowflake/hooks/snowflake_sql_api.py
rename to providers/src/airflow/providers/snowflake/hooks/snowflake_sql_api.py
index 5143f573c2637..8770492d06ec1 100644
--- a/airflow/providers/snowflake/hooks/snowflake_sql_api.py
+++ b/providers/src/airflow/providers/snowflake/hooks/snowflake_sql_api.py
@@ -327,9 +327,10 @@ async def get_sql_api_query_status_async(self, query_id: str) -> dict[str, str |
         """
         self.log.info("Retrieving status for query id %s", query_id)
         header, params, url = self.get_request_url_header_params(query_id)
-        async with aiohttp.ClientSession(headers=header) as session, session.get(
-            url, params=params
-        ) as response:
+        async with (
+            aiohttp.ClientSession(headers=header) as session,
+            session.get(url, params=params) as response,
+        ):
             status_code = response.status
             resp = await response.json()
             return self._process_response(status_code, resp)
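The hunk above replaces a two-manager `async with` that had to wrap mid-call with the parenthesized form (officially part of the Python grammar since 3.10), putting each context manager on its own line with a trailing comma. A minimal self-contained sketch of the same pattern, with a placeholder URL rather than anything from this patch:

    import asyncio

    import aiohttp


    async def fetch_status(url: str) -> int:
        # Each context manager gets its own line inside the parentheses;
        # the trailing comma keeps adding another manager to a one-line diff.
        async with (
            aiohttp.ClientSession() as session,
            session.get(url) as response,
        ):
            return response.status


    if __name__ == "__main__":
        # Any reachable HTTP endpoint works here.
        print(asyncio.run(fetch_status("https://example.com")))

The small-diff property of the trailing comma is presumably why this reformatting was applied tree-wide rather than only where lines were touched.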
diff --git a/airflow/providers/snowflake/operators/__init__.py b/providers/src/airflow/providers/snowflake/operators/__init__.py
similarity index 100%
rename from airflow/providers/snowflake/operators/__init__.py
rename to providers/src/airflow/providers/snowflake/operators/__init__.py
diff --git a/airflow/providers/snowflake/operators/snowflake.py b/providers/src/airflow/providers/snowflake/operators/snowflake.py
similarity index 100%
rename from airflow/providers/snowflake/operators/snowflake.py
rename to providers/src/airflow/providers/snowflake/operators/snowflake.py
diff --git a/airflow/providers/snowflake/operators/snowpark.py b/providers/src/airflow/providers/snowflake/operators/snowpark.py
similarity index 100%
rename from airflow/providers/snowflake/operators/snowpark.py
rename to providers/src/airflow/providers/snowflake/operators/snowpark.py
diff --git a/airflow/providers/snowflake/provider.yaml b/providers/src/airflow/providers/snowflake/provider.yaml
similarity index 100%
rename from airflow/providers/snowflake/provider.yaml
rename to providers/src/airflow/providers/snowflake/provider.yaml
diff --git a/airflow/providers/snowflake/transfers/__init__.py b/providers/src/airflow/providers/snowflake/transfers/__init__.py
similarity index 100%
rename from airflow/providers/snowflake/transfers/__init__.py
rename to providers/src/airflow/providers/snowflake/transfers/__init__.py
diff --git a/airflow/providers/snowflake/transfers/copy_into_snowflake.py b/providers/src/airflow/providers/snowflake/transfers/copy_into_snowflake.py
similarity index 100%
rename from airflow/providers/snowflake/transfers/copy_into_snowflake.py
rename to providers/src/airflow/providers/snowflake/transfers/copy_into_snowflake.py
diff --git a/airflow/providers/snowflake/triggers/__init__.py b/providers/src/airflow/providers/snowflake/triggers/__init__.py
similarity index 100%
rename from airflow/providers/snowflake/triggers/__init__.py
rename to providers/src/airflow/providers/snowflake/triggers/__init__.py
diff --git a/airflow/providers/snowflake/triggers/snowflake_trigger.py b/providers/src/airflow/providers/snowflake/triggers/snowflake_trigger.py
similarity index 100%
rename from airflow/providers/snowflake/triggers/snowflake_trigger.py
rename to providers/src/airflow/providers/snowflake/triggers/snowflake_trigger.py
diff --git a/airflow/providers/snowflake/utils/__init__.py b/providers/src/airflow/providers/snowflake/utils/__init__.py
similarity index 100%
rename from airflow/providers/snowflake/utils/__init__.py
rename to providers/src/airflow/providers/snowflake/utils/__init__.py
diff --git a/airflow/providers/snowflake/utils/common.py b/providers/src/airflow/providers/snowflake/utils/common.py
similarity index 100%
rename from airflow/providers/snowflake/utils/common.py
rename to providers/src/airflow/providers/snowflake/utils/common.py
diff --git a/airflow/providers/snowflake/utils/openlineage.py b/providers/src/airflow/providers/snowflake/utils/openlineage.py
similarity index 100%
rename from airflow/providers/snowflake/utils/openlineage.py
rename to providers/src/airflow/providers/snowflake/utils/openlineage.py
diff --git a/airflow/providers/snowflake/utils/snowpark.py b/providers/src/airflow/providers/snowflake/utils/snowpark.py
similarity index 100%
rename from airflow/providers/snowflake/utils/snowpark.py
rename to providers/src/airflow/providers/snowflake/utils/snowpark.py
diff --git a/airflow/providers/snowflake/utils/sql_api_generate_jwt.py b/providers/src/airflow/providers/snowflake/utils/sql_api_generate_jwt.py
similarity index 100%
rename from airflow/providers/snowflake/utils/sql_api_generate_jwt.py
rename to providers/src/airflow/providers/snowflake/utils/sql_api_generate_jwt.py
diff --git a/airflow/providers/sqlite/.latest-doc-only-change.txt b/providers/src/airflow/providers/sqlite/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/sqlite/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/sqlite/.latest-doc-only-change.txt
diff --git a/airflow/providers/sqlite/CHANGELOG.rst b/providers/src/airflow/providers/sqlite/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/sqlite/CHANGELOG.rst
rename to providers/src/airflow/providers/sqlite/CHANGELOG.rst
diff --git a/airflow/providers/sqlite/__init__.py b/providers/src/airflow/providers/sqlite/__init__.py
similarity index 100%
rename from airflow/providers/sqlite/__init__.py
rename to providers/src/airflow/providers/sqlite/__init__.py
diff --git a/airflow/providers/smtp/hooks/__init__.py b/providers/src/airflow/providers/sqlite/hooks/__init__.py
similarity index 100%
rename from airflow/providers/smtp/hooks/__init__.py
rename to providers/src/airflow/providers/sqlite/hooks/__init__.py
diff --git a/airflow/providers/sqlite/hooks/sqlite.py b/providers/src/airflow/providers/sqlite/hooks/sqlite.py
similarity index 100%
rename from airflow/providers/sqlite/hooks/sqlite.py
rename to providers/src/airflow/providers/sqlite/hooks/sqlite.py
diff --git a/airflow/providers/smtp/operators/__init__.py b/providers/src/airflow/providers/sqlite/operators/__init__.py
similarity index 100%
rename from airflow/providers/smtp/operators/__init__.py
rename to providers/src/airflow/providers/sqlite/operators/__init__.py
diff --git a/airflow/providers/sqlite/operators/sqlite.py b/providers/src/airflow/providers/sqlite/operators/sqlite.py
similarity index 100%
rename from airflow/providers/sqlite/operators/sqlite.py
rename to providers/src/airflow/providers/sqlite/operators/sqlite.py
diff --git a/airflow/providers/sqlite/provider.yaml b/providers/src/airflow/providers/sqlite/provider.yaml
similarity index 100%
rename from airflow/providers/sqlite/provider.yaml
rename to providers/src/airflow/providers/sqlite/provider.yaml
diff --git a/airflow/providers/ssh/.latest-doc-only-change.txt b/providers/src/airflow/providers/ssh/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/ssh/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/ssh/.latest-doc-only-change.txt
diff --git a/airflow/providers/ssh/CHANGELOG.rst b/providers/src/airflow/providers/ssh/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/ssh/CHANGELOG.rst
rename to providers/src/airflow/providers/ssh/CHANGELOG.rst
diff --git a/airflow/providers/ssh/__init__.py b/providers/src/airflow/providers/ssh/__init__.py
similarity index 100%
rename from airflow/providers/ssh/__init__.py
rename to providers/src/airflow/providers/ssh/__init__.py
diff --git a/airflow/providers/sqlite/hooks/__init__.py b/providers/src/airflow/providers/ssh/hooks/__init__.py
similarity index 100%
rename from airflow/providers/sqlite/hooks/__init__.py
rename to providers/src/airflow/providers/ssh/hooks/__init__.py
diff --git a/airflow/providers/ssh/hooks/ssh.py b/providers/src/airflow/providers/ssh/hooks/ssh.py
similarity index 100%
rename from airflow/providers/ssh/hooks/ssh.py
rename to providers/src/airflow/providers/ssh/hooks/ssh.py
diff --git a/airflow/providers/sqlite/operators/__init__.py b/providers/src/airflow/providers/ssh/operators/__init__.py
similarity index 100%
rename from airflow/providers/sqlite/operators/__init__.py
rename to providers/src/airflow/providers/ssh/operators/__init__.py
diff --git a/airflow/providers/ssh/operators/ssh.py b/providers/src/airflow/providers/ssh/operators/ssh.py
similarity index 100%
rename from airflow/providers/ssh/operators/ssh.py
rename to providers/src/airflow/providers/ssh/operators/ssh.py
diff --git a/airflow/providers/ssh/provider.yaml b/providers/src/airflow/providers/ssh/provider.yaml
similarity index 100%
rename from airflow/providers/ssh/provider.yaml
rename to providers/src/airflow/providers/ssh/provider.yaml
diff --git a/airflow/providers/standard/CHANGELOG.rst b/providers/src/airflow/providers/standard/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/standard/CHANGELOG.rst
rename to providers/src/airflow/providers/standard/CHANGELOG.rst
diff --git a/airflow/providers/ssh/hooks/__init__.py b/providers/src/airflow/providers/standard/__init__.py
similarity index 100%
rename from airflow/providers/ssh/hooks/__init__.py
rename to providers/src/airflow/providers/standard/__init__.py
diff --git a/airflow/providers/standard/operators/__init__.py b/providers/src/airflow/providers/standard/operators/__init__.py
similarity index 100%
rename from airflow/providers/standard/operators/__init__.py
rename to providers/src/airflow/providers/standard/operators/__init__.py
diff --git a/airflow/providers/standard/operators/bash.py b/providers/src/airflow/providers/standard/operators/bash.py
similarity index 100%
rename from airflow/providers/standard/operators/bash.py
rename to providers/src/airflow/providers/standard/operators/bash.py
diff --git a/airflow/providers/standard/operators/datetime.py b/providers/src/airflow/providers/standard/operators/datetime.py
similarity index 100%
rename from airflow/providers/standard/operators/datetime.py
rename to providers/src/airflow/providers/standard/operators/datetime.py
diff --git a/airflow/providers/standard/operators/weekday.py b/providers/src/airflow/providers/standard/operators/weekday.py
similarity index 100%
rename from airflow/providers/standard/operators/weekday.py
rename to providers/src/airflow/providers/standard/operators/weekday.py
diff --git a/airflow/providers/standard/provider.yaml b/providers/src/airflow/providers/standard/provider.yaml
similarity index 100%
rename from airflow/providers/standard/provider.yaml
rename to providers/src/airflow/providers/standard/provider.yaml
diff --git a/airflow/providers/standard/sensors/__init__.py b/providers/src/airflow/providers/standard/sensors/__init__.py
similarity index 100%
rename from airflow/providers/standard/sensors/__init__.py
rename to providers/src/airflow/providers/standard/sensors/__init__.py
diff --git a/airflow/providers/standard/sensors/bash.py b/providers/src/airflow/providers/standard/sensors/bash.py
similarity index 100%
rename from airflow/providers/standard/sensors/bash.py
rename to providers/src/airflow/providers/standard/sensors/bash.py
diff --git a/airflow/providers/standard/sensors/date_time.py b/providers/src/airflow/providers/standard/sensors/date_time.py
similarity index 100%
rename from airflow/providers/standard/sensors/date_time.py
rename to providers/src/airflow/providers/standard/sensors/date_time.py
diff --git a/airflow/providers/standard/sensors/time.py b/providers/src/airflow/providers/standard/sensors/time.py
similarity index 100%
rename from airflow/providers/standard/sensors/time.py
rename to providers/src/airflow/providers/standard/sensors/time.py
diff --git a/airflow/providers/standard/sensors/time_delta.py b/providers/src/airflow/providers/standard/sensors/time_delta.py
similarity index 100%
rename from airflow/providers/standard/sensors/time_delta.py
rename to providers/src/airflow/providers/standard/sensors/time_delta.py
diff --git a/airflow/providers/standard/sensors/weekday.py b/providers/src/airflow/providers/standard/sensors/weekday.py
similarity index 100%
rename from airflow/providers/standard/sensors/weekday.py
rename to providers/src/airflow/providers/standard/sensors/weekday.py
diff --git a/airflow/providers/tableau/.latest-doc-only-change.txt b/providers/src/airflow/providers/tableau/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/tableau/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/tableau/.latest-doc-only-change.txt
diff --git a/airflow/providers/tableau/CHANGELOG.rst b/providers/src/airflow/providers/tableau/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/tableau/CHANGELOG.rst
rename to providers/src/airflow/providers/tableau/CHANGELOG.rst
diff --git a/airflow/providers/tableau/__init__.py b/providers/src/airflow/providers/tableau/__init__.py
similarity index 100%
rename from airflow/providers/tableau/__init__.py
rename to providers/src/airflow/providers/tableau/__init__.py
diff --git a/airflow/providers/ssh/operators/__init__.py b/providers/src/airflow/providers/tableau/hooks/__init__.py
similarity index 100%
rename from airflow/providers/ssh/operators/__init__.py
rename to providers/src/airflow/providers/tableau/hooks/__init__.py
diff --git a/airflow/providers/tableau/hooks/tableau.py b/providers/src/airflow/providers/tableau/hooks/tableau.py
similarity index 100%
rename from airflow/providers/tableau/hooks/tableau.py
rename to providers/src/airflow/providers/tableau/hooks/tableau.py
diff --git a/airflow/providers/tableau/operators/__init__.py b/providers/src/airflow/providers/tableau/operators/__init__.py
similarity index 100%
rename from airflow/providers/tableau/operators/__init__.py
rename to providers/src/airflow/providers/tableau/operators/__init__.py
diff --git a/airflow/providers/tableau/operators/tableau.py b/providers/src/airflow/providers/tableau/operators/tableau.py
similarity index 100%
rename from airflow/providers/tableau/operators/tableau.py
rename to providers/src/airflow/providers/tableau/operators/tableau.py
diff --git a/airflow/providers/tableau/provider.yaml b/providers/src/airflow/providers/tableau/provider.yaml
similarity index 100%
rename from airflow/providers/tableau/provider.yaml
rename to providers/src/airflow/providers/tableau/provider.yaml
diff --git a/airflow/providers/tableau/sensors/__init__.py b/providers/src/airflow/providers/tableau/sensors/__init__.py
similarity index 100%
rename from airflow/providers/tableau/sensors/__init__.py
rename to providers/src/airflow/providers/tableau/sensors/__init__.py
diff --git a/airflow/providers/tableau/sensors/tableau.py b/providers/src/airflow/providers/tableau/sensors/tableau.py
similarity index 100%
rename from airflow/providers/tableau/sensors/tableau.py
rename to providers/src/airflow/providers/tableau/sensors/tableau.py
diff --git a/airflow/providers/telegram/.latest-doc-only-change.txt b/providers/src/airflow/providers/telegram/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/telegram/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/telegram/.latest-doc-only-change.txt
diff --git a/airflow/providers/telegram/CHANGELOG.rst b/providers/src/airflow/providers/telegram/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/telegram/CHANGELOG.rst
rename to providers/src/airflow/providers/telegram/CHANGELOG.rst
diff --git a/airflow/providers/telegram/__init__.py b/providers/src/airflow/providers/telegram/__init__.py
similarity index 100%
rename from airflow/providers/telegram/__init__.py
rename to providers/src/airflow/providers/telegram/__init__.py
diff --git a/airflow/providers/standard/__init__.py b/providers/src/airflow/providers/telegram/hooks/__init__.py
similarity index 100%
rename from airflow/providers/standard/__init__.py
rename to providers/src/airflow/providers/telegram/hooks/__init__.py
diff --git a/airflow/providers/telegram/hooks/telegram.py b/providers/src/airflow/providers/telegram/hooks/telegram.py
similarity index 100%
rename from airflow/providers/telegram/hooks/telegram.py
rename to providers/src/airflow/providers/telegram/hooks/telegram.py
diff --git a/airflow/providers/tableau/hooks/__init__.py b/providers/src/airflow/providers/telegram/operators/__init__.py
similarity index 100%
rename from airflow/providers/tableau/hooks/__init__.py
rename to providers/src/airflow/providers/telegram/operators/__init__.py
diff --git a/airflow/providers/telegram/operators/telegram.py b/providers/src/airflow/providers/telegram/operators/telegram.py
similarity index 100%
rename from airflow/providers/telegram/operators/telegram.py
rename to providers/src/airflow/providers/telegram/operators/telegram.py
diff --git a/airflow/providers/telegram/provider.yaml b/providers/src/airflow/providers/telegram/provider.yaml
similarity index 100%
rename from airflow/providers/telegram/provider.yaml
rename to providers/src/airflow/providers/telegram/provider.yaml
diff --git a/airflow/providers/teradata/.latest-doc-only-change.txt b/providers/src/airflow/providers/teradata/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/teradata/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/teradata/.latest-doc-only-change.txt
diff --git a/airflow/providers/teradata/CHANGELOG.rst b/providers/src/airflow/providers/teradata/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/teradata/CHANGELOG.rst
rename to providers/src/airflow/providers/teradata/CHANGELOG.rst
diff --git a/airflow/providers/teradata/__init__.py b/providers/src/airflow/providers/teradata/__init__.py
similarity index 100%
rename from airflow/providers/teradata/__init__.py
rename to providers/src/airflow/providers/teradata/__init__.py
diff --git a/airflow/providers/telegram/hooks/__init__.py b/providers/src/airflow/providers/teradata/hooks/__init__.py
similarity index 100%
rename from airflow/providers/telegram/hooks/__init__.py
rename to providers/src/airflow/providers/teradata/hooks/__init__.py
diff --git a/airflow/providers/teradata/hooks/teradata.py b/providers/src/airflow/providers/teradata/hooks/teradata.py
similarity index 100%
rename from airflow/providers/teradata/hooks/teradata.py
rename to providers/src/airflow/providers/teradata/hooks/teradata.py
diff --git a/airflow/providers/telegram/operators/__init__.py b/providers/src/airflow/providers/teradata/operators/__init__.py
similarity index 100%
rename from airflow/providers/telegram/operators/__init__.py
rename to providers/src/airflow/providers/teradata/operators/__init__.py
diff --git a/airflow/providers/teradata/operators/teradata.py b/providers/src/airflow/providers/teradata/operators/teradata.py
similarity index 100%
rename from airflow/providers/teradata/operators/teradata.py
rename to providers/src/airflow/providers/teradata/operators/teradata.py
diff --git a/airflow/providers/teradata/operators/teradata_compute_cluster.py b/providers/src/airflow/providers/teradata/operators/teradata_compute_cluster.py
similarity index 100%
rename from airflow/providers/teradata/operators/teradata_compute_cluster.py
rename to providers/src/airflow/providers/teradata/operators/teradata_compute_cluster.py
diff --git a/airflow/providers/teradata/provider.yaml b/providers/src/airflow/providers/teradata/provider.yaml
similarity index 100%
rename from airflow/providers/teradata/provider.yaml
rename to providers/src/airflow/providers/teradata/provider.yaml
diff --git a/airflow/providers/teradata/transfers/__init__.py b/providers/src/airflow/providers/teradata/transfers/__init__.py
similarity index 100%
rename from airflow/providers/teradata/transfers/__init__.py
rename to providers/src/airflow/providers/teradata/transfers/__init__.py
diff --git a/airflow/providers/teradata/transfers/azure_blob_to_teradata.py b/providers/src/airflow/providers/teradata/transfers/azure_blob_to_teradata.py
similarity index 100%
rename from airflow/providers/teradata/transfers/azure_blob_to_teradata.py
rename to providers/src/airflow/providers/teradata/transfers/azure_blob_to_teradata.py
diff --git a/airflow/providers/teradata/transfers/s3_to_teradata.py b/providers/src/airflow/providers/teradata/transfers/s3_to_teradata.py
similarity index 100%
rename from airflow/providers/teradata/transfers/s3_to_teradata.py
rename to providers/src/airflow/providers/teradata/transfers/s3_to_teradata.py
diff --git a/airflow/providers/teradata/transfers/teradata_to_teradata.py b/providers/src/airflow/providers/teradata/transfers/teradata_to_teradata.py
similarity index 100%
rename from airflow/providers/teradata/transfers/teradata_to_teradata.py
rename to providers/src/airflow/providers/teradata/transfers/teradata_to_teradata.py
diff --git a/airflow/providers/teradata/triggers/__init__.py b/providers/src/airflow/providers/teradata/triggers/__init__.py
similarity index 100%
rename from airflow/providers/teradata/triggers/__init__.py
rename to providers/src/airflow/providers/teradata/triggers/__init__.py
diff --git a/airflow/providers/teradata/triggers/teradata_compute_cluster.py b/providers/src/airflow/providers/teradata/triggers/teradata_compute_cluster.py
similarity index 100%
rename from airflow/providers/teradata/triggers/teradata_compute_cluster.py
rename to providers/src/airflow/providers/teradata/triggers/teradata_compute_cluster.py
diff --git a/airflow/providers/teradata/utils/__init__.py b/providers/src/airflow/providers/teradata/utils/__init__.py
similarity index 100%
rename from airflow/providers/teradata/utils/__init__.py
rename to providers/src/airflow/providers/teradata/utils/__init__.py
diff --git a/airflow/providers/teradata/utils/constants.py b/providers/src/airflow/providers/teradata/utils/constants.py
similarity index 100%
rename from airflow/providers/teradata/utils/constants.py
rename to providers/src/airflow/providers/teradata/utils/constants.py
diff --git a/airflow/providers/trino/.latest-doc-only-change.txt b/providers/src/airflow/providers/trino/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/trino/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/trino/.latest-doc-only-change.txt
diff --git a/airflow/providers/trino/CHANGELOG.rst b/providers/src/airflow/providers/trino/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/trino/CHANGELOG.rst
rename to providers/src/airflow/providers/trino/CHANGELOG.rst
diff --git a/airflow/providers/trino/__init__.py b/providers/src/airflow/providers/trino/__init__.py
similarity index 100%
rename from airflow/providers/trino/__init__.py
rename to providers/src/airflow/providers/trino/__init__.py
diff --git a/airflow/providers/trino/assets/__init__.py b/providers/src/airflow/providers/trino/assets/__init__.py
similarity index 100%
rename from airflow/providers/trino/assets/__init__.py
rename to providers/src/airflow/providers/trino/assets/__init__.py
diff --git a/airflow/providers/trino/assets/trino.py b/providers/src/airflow/providers/trino/assets/trino.py
similarity index 100%
rename from airflow/providers/trino/assets/trino.py
rename to providers/src/airflow/providers/trino/assets/trino.py
diff --git a/airflow/providers/teradata/hooks/__init__.py b/providers/src/airflow/providers/trino/hooks/__init__.py
similarity index 100%
rename from airflow/providers/teradata/hooks/__init__.py
rename to providers/src/airflow/providers/trino/hooks/__init__.py
diff --git a/airflow/providers/trino/hooks/trino.py b/providers/src/airflow/providers/trino/hooks/trino.py
similarity index 100%
rename from airflow/providers/trino/hooks/trino.py
rename to providers/src/airflow/providers/trino/hooks/trino.py
diff --git a/airflow/providers/trino/operators/__init__.py b/providers/src/airflow/providers/trino/operators/__init__.py
similarity index 100%
rename from airflow/providers/trino/operators/__init__.py
rename to providers/src/airflow/providers/trino/operators/__init__.py
diff --git a/airflow/providers/trino/operators/trino.py b/providers/src/airflow/providers/trino/operators/trino.py
similarity index 100%
rename from airflow/providers/trino/operators/trino.py
rename to providers/src/airflow/providers/trino/operators/trino.py
diff --git a/airflow/providers/trino/provider.yaml b/providers/src/airflow/providers/trino/provider.yaml
similarity index 100%
rename from airflow/providers/trino/provider.yaml
rename to providers/src/airflow/providers/trino/provider.yaml
diff --git a/airflow/providers/teradata/operators/__init__.py b/providers/src/airflow/providers/trino/transfers/__init__.py
similarity index 100%
rename from airflow/providers/teradata/operators/__init__.py
rename to providers/src/airflow/providers/trino/transfers/__init__.py
diff --git a/airflow/providers/trino/transfers/gcs_to_trino.py b/providers/src/airflow/providers/trino/transfers/gcs_to_trino.py
similarity index 100%
rename from airflow/providers/trino/transfers/gcs_to_trino.py
rename to providers/src/airflow/providers/trino/transfers/gcs_to_trino.py
diff --git a/airflow/providers/vertica/.latest-doc-only-change.txt b/providers/src/airflow/providers/vertica/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/vertica/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/vertica/.latest-doc-only-change.txt
diff --git a/airflow/providers/vertica/CHANGELOG.rst b/providers/src/airflow/providers/vertica/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/vertica/CHANGELOG.rst
rename to providers/src/airflow/providers/vertica/CHANGELOG.rst
diff --git a/airflow/providers/vertica/__init__.py b/providers/src/airflow/providers/vertica/__init__.py
similarity index 100%
rename from airflow/providers/vertica/__init__.py
rename to providers/src/airflow/providers/vertica/__init__.py
diff --git a/airflow/providers/trino/hooks/__init__.py b/providers/src/airflow/providers/vertica/hooks/__init__.py
similarity index 100%
rename from airflow/providers/trino/hooks/__init__.py
rename to providers/src/airflow/providers/vertica/hooks/__init__.py
diff --git a/airflow/providers/vertica/hooks/vertica.py b/providers/src/airflow/providers/vertica/hooks/vertica.py
similarity index 100%
rename from airflow/providers/vertica/hooks/vertica.py
rename to providers/src/airflow/providers/vertica/hooks/vertica.py
diff --git a/airflow/providers/trino/transfers/__init__.py b/providers/src/airflow/providers/vertica/operators/__init__.py
similarity index 100%
rename from airflow/providers/trino/transfers/__init__.py
rename to providers/src/airflow/providers/vertica/operators/__init__.py
diff --git a/airflow/providers/vertica/operators/vertica.py b/providers/src/airflow/providers/vertica/operators/vertica.py
similarity index 100%
rename from airflow/providers/vertica/operators/vertica.py
rename to providers/src/airflow/providers/vertica/operators/vertica.py
diff --git a/airflow/providers/vertica/provider.yaml b/providers/src/airflow/providers/vertica/provider.yaml
similarity index 100%
rename from airflow/providers/vertica/provider.yaml
rename to providers/src/airflow/providers/vertica/provider.yaml
diff --git a/airflow/providers/weaviate/.latest-doc-only-change.txt b/providers/src/airflow/providers/weaviate/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/weaviate/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/weaviate/.latest-doc-only-change.txt
diff --git a/airflow/providers/weaviate/CHANGELOG.rst b/providers/src/airflow/providers/weaviate/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/weaviate/CHANGELOG.rst
rename to providers/src/airflow/providers/weaviate/CHANGELOG.rst
diff --git a/airflow/providers/weaviate/__init__.py b/providers/src/airflow/providers/weaviate/__init__.py
similarity index 100%
rename from airflow/providers/weaviate/__init__.py
rename to providers/src/airflow/providers/weaviate/__init__.py
diff --git a/airflow/providers/weaviate/hooks/__init__.py b/providers/src/airflow/providers/weaviate/hooks/__init__.py
similarity index 100%
rename from airflow/providers/weaviate/hooks/__init__.py
rename to providers/src/airflow/providers/weaviate/hooks/__init__.py
diff --git a/airflow/providers/weaviate/hooks/weaviate.py b/providers/src/airflow/providers/weaviate/hooks/weaviate.py
similarity index 100%
rename from airflow/providers/weaviate/hooks/weaviate.py
rename to providers/src/airflow/providers/weaviate/hooks/weaviate.py
diff --git a/airflow/providers/weaviate/operators/__init__.py b/providers/src/airflow/providers/weaviate/operators/__init__.py
similarity index 100%
rename from airflow/providers/weaviate/operators/__init__.py
rename to providers/src/airflow/providers/weaviate/operators/__init__.py
diff --git a/airflow/providers/weaviate/operators/weaviate.py b/providers/src/airflow/providers/weaviate/operators/weaviate.py
similarity index 100%
rename from airflow/providers/weaviate/operators/weaviate.py
rename to providers/src/airflow/providers/weaviate/operators/weaviate.py
diff --git a/airflow/providers/weaviate/provider.yaml b/providers/src/airflow/providers/weaviate/provider.yaml
similarity index 100%
rename from airflow/providers/weaviate/provider.yaml
rename to providers/src/airflow/providers/weaviate/provider.yaml
diff --git a/airflow/providers/yandex/.latest-doc-only-change.txt b/providers/src/airflow/providers/yandex/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/yandex/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/yandex/.latest-doc-only-change.txt
diff --git a/airflow/providers/yandex/CHANGELOG.rst b/providers/src/airflow/providers/yandex/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/yandex/CHANGELOG.rst
rename to providers/src/airflow/providers/yandex/CHANGELOG.rst
diff --git a/airflow/providers/yandex/__init__.py b/providers/src/airflow/providers/yandex/__init__.py
similarity index 100%
rename from airflow/providers/yandex/__init__.py
rename to providers/src/airflow/providers/yandex/__init__.py
diff --git a/airflow/providers/yandex/hooks/__init__.py b/providers/src/airflow/providers/yandex/hooks/__init__.py
similarity index 100%
rename from airflow/providers/yandex/hooks/__init__.py
rename to providers/src/airflow/providers/yandex/hooks/__init__.py
diff --git a/airflow/providers/yandex/hooks/dataproc.py b/providers/src/airflow/providers/yandex/hooks/dataproc.py
similarity index 100%
rename from airflow/providers/yandex/hooks/dataproc.py
rename to providers/src/airflow/providers/yandex/hooks/dataproc.py
diff --git a/airflow/providers/yandex/hooks/yandex.py b/providers/src/airflow/providers/yandex/hooks/yandex.py
similarity index 100%
rename from airflow/providers/yandex/hooks/yandex.py
rename to providers/src/airflow/providers/yandex/hooks/yandex.py
diff --git a/airflow/providers/yandex/hooks/yandexcloud_dataproc.py b/providers/src/airflow/providers/yandex/hooks/yandexcloud_dataproc.py
similarity index 100%
rename from airflow/providers/yandex/hooks/yandexcloud_dataproc.py
rename to providers/src/airflow/providers/yandex/hooks/yandexcloud_dataproc.py
diff --git a/airflow/providers/yandex/hooks/yq.py b/providers/src/airflow/providers/yandex/hooks/yq.py
similarity index 100%
rename from airflow/providers/yandex/hooks/yq.py
rename to providers/src/airflow/providers/yandex/hooks/yq.py
diff --git a/airflow/providers/yandex/links/__init__.py b/providers/src/airflow/providers/yandex/links/__init__.py
similarity index 100%
rename from airflow/providers/yandex/links/__init__.py
rename to providers/src/airflow/providers/yandex/links/__init__.py
diff --git a/airflow/providers/yandex/links/yq.py b/providers/src/airflow/providers/yandex/links/yq.py
similarity index 100%
rename from airflow/providers/yandex/links/yq.py
rename to providers/src/airflow/providers/yandex/links/yq.py
diff --git a/airflow/providers/yandex/operators/__init__.py b/providers/src/airflow/providers/yandex/operators/__init__.py
similarity index 100%
rename from airflow/providers/yandex/operators/__init__.py
rename to providers/src/airflow/providers/yandex/operators/__init__.py
diff --git a/airflow/providers/yandex/operators/dataproc.py b/providers/src/airflow/providers/yandex/operators/dataproc.py
similarity index 100%
rename from airflow/providers/yandex/operators/dataproc.py
rename to providers/src/airflow/providers/yandex/operators/dataproc.py
diff --git a/airflow/providers/yandex/operators/yandexcloud_dataproc.py b/providers/src/airflow/providers/yandex/operators/yandexcloud_dataproc.py
similarity index 100%
rename from airflow/providers/yandex/operators/yandexcloud_dataproc.py
rename to providers/src/airflow/providers/yandex/operators/yandexcloud_dataproc.py
diff --git a/airflow/providers/yandex/operators/yq.py b/providers/src/airflow/providers/yandex/operators/yq.py
similarity index 100%
rename from airflow/providers/yandex/operators/yq.py
rename to providers/src/airflow/providers/yandex/operators/yq.py
diff --git a/airflow/providers/yandex/provider.yaml b/providers/src/airflow/providers/yandex/provider.yaml
similarity index 100%
rename from airflow/providers/yandex/provider.yaml
rename to providers/src/airflow/providers/yandex/provider.yaml
diff --git a/airflow/providers/yandex/secrets/__init__.py b/providers/src/airflow/providers/yandex/secrets/__init__.py
similarity index 100%
rename from airflow/providers/yandex/secrets/__init__.py
rename to providers/src/airflow/providers/yandex/secrets/__init__.py
diff --git a/airflow/providers/yandex/secrets/lockbox.py b/providers/src/airflow/providers/yandex/secrets/lockbox.py
similarity index 100%
rename from airflow/providers/yandex/secrets/lockbox.py
rename to providers/src/airflow/providers/yandex/secrets/lockbox.py
diff --git a/airflow/providers/yandex/utils/__init__.py b/providers/src/airflow/providers/yandex/utils/__init__.py
similarity index 100%
rename from airflow/providers/yandex/utils/__init__.py
rename to providers/src/airflow/providers/yandex/utils/__init__.py
diff --git a/airflow/providers/yandex/utils/credentials.py b/providers/src/airflow/providers/yandex/utils/credentials.py
similarity index 100%
rename from airflow/providers/yandex/utils/credentials.py
rename to providers/src/airflow/providers/yandex/utils/credentials.py
diff --git a/airflow/providers/yandex/utils/defaults.py b/providers/src/airflow/providers/yandex/utils/defaults.py
similarity index 100%
rename from airflow/providers/yandex/utils/defaults.py
rename to providers/src/airflow/providers/yandex/utils/defaults.py
diff --git a/airflow/providers/yandex/utils/fields.py b/providers/src/airflow/providers/yandex/utils/fields.py
similarity index 100%
rename from airflow/providers/yandex/utils/fields.py
rename to providers/src/airflow/providers/yandex/utils/fields.py
diff --git a/airflow/providers/yandex/utils/user_agent.py b/providers/src/airflow/providers/yandex/utils/user_agent.py
similarity index 100%
rename from airflow/providers/yandex/utils/user_agent.py
rename to providers/src/airflow/providers/yandex/utils/user_agent.py
diff --git a/airflow/providers/ydb/CHANGELOG.rst b/providers/src/airflow/providers/ydb/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/ydb/CHANGELOG.rst
rename to providers/src/airflow/providers/ydb/CHANGELOG.rst
diff --git a/airflow/providers/ydb/__init__.py b/providers/src/airflow/providers/ydb/__init__.py
similarity index 100%
rename from airflow/providers/ydb/__init__.py
rename to providers/src/airflow/providers/ydb/__init__.py
diff --git a/airflow/providers/ydb/hooks/__init__.py b/providers/src/airflow/providers/ydb/hooks/__init__.py
similarity index 100%
rename from airflow/providers/ydb/hooks/__init__.py
rename to providers/src/airflow/providers/ydb/hooks/__init__.py
diff --git a/airflow/providers/ydb/hooks/_vendor/__init__.py b/providers/src/airflow/providers/ydb/hooks/_vendor/__init__.py
similarity index 100%
rename from airflow/providers/ydb/hooks/_vendor/__init__.py
rename to providers/src/airflow/providers/ydb/hooks/_vendor/__init__.py
diff --git a/airflow/providers/ydb/hooks/_vendor/dbapi/__init__.py b/providers/src/airflow/providers/ydb/hooks/_vendor/dbapi/__init__.py
similarity index 100%
rename from airflow/providers/ydb/hooks/_vendor/dbapi/__init__.py
rename to providers/src/airflow/providers/ydb/hooks/_vendor/dbapi/__init__.py
diff --git a/airflow/providers/ydb/hooks/_vendor/dbapi/connection.py b/providers/src/airflow/providers/ydb/hooks/_vendor/dbapi/connection.py
similarity index 100%
rename from airflow/providers/ydb/hooks/_vendor/dbapi/connection.py
rename to providers/src/airflow/providers/ydb/hooks/_vendor/dbapi/connection.py
diff --git a/airflow/providers/ydb/hooks/_vendor/dbapi/constants.py b/providers/src/airflow/providers/ydb/hooks/_vendor/dbapi/constants.py
similarity index 100%
rename from airflow/providers/ydb/hooks/_vendor/dbapi/constants.py
rename to providers/src/airflow/providers/ydb/hooks/_vendor/dbapi/constants.py
diff --git a/airflow/providers/ydb/hooks/_vendor/dbapi/cursor.py b/providers/src/airflow/providers/ydb/hooks/_vendor/dbapi/cursor.py
similarity index 100%
rename from airflow/providers/ydb/hooks/_vendor/dbapi/cursor.py
rename to providers/src/airflow/providers/ydb/hooks/_vendor/dbapi/cursor.py
diff --git a/airflow/providers/ydb/hooks/_vendor/dbapi/errors.py b/providers/src/airflow/providers/ydb/hooks/_vendor/dbapi/errors.py
similarity index 100%
rename from airflow/providers/ydb/hooks/_vendor/dbapi/errors.py
rename to providers/src/airflow/providers/ydb/hooks/_vendor/dbapi/errors.py
diff --git a/airflow/providers/ydb/hooks/_vendor/readme.md b/providers/src/airflow/providers/ydb/hooks/_vendor/readme.md
similarity index 100%
rename from airflow/providers/ydb/hooks/_vendor/readme.md
rename to providers/src/airflow/providers/ydb/hooks/_vendor/readme.md
diff --git a/airflow/providers/ydb/hooks/ydb.py b/providers/src/airflow/providers/ydb/hooks/ydb.py
similarity index 100%
rename from airflow/providers/ydb/hooks/ydb.py
rename to providers/src/airflow/providers/ydb/hooks/ydb.py
diff --git a/airflow/providers/ydb/operators/__init__.py b/providers/src/airflow/providers/ydb/operators/__init__.py
similarity index 100%
rename from airflow/providers/ydb/operators/__init__.py
rename to providers/src/airflow/providers/ydb/operators/__init__.py
diff --git a/airflow/providers/ydb/operators/ydb.py b/providers/src/airflow/providers/ydb/operators/ydb.py
similarity index 100%
rename from airflow/providers/ydb/operators/ydb.py
rename to providers/src/airflow/providers/ydb/operators/ydb.py
diff --git a/airflow/providers/ydb/provider.yaml b/providers/src/airflow/providers/ydb/provider.yaml
similarity index 100%
rename from airflow/providers/ydb/provider.yaml
rename to providers/src/airflow/providers/ydb/provider.yaml
diff --git a/airflow/providers/ydb/utils/__init__.py b/providers/src/airflow/providers/ydb/utils/__init__.py
similarity index 100%
rename from airflow/providers/ydb/utils/__init__.py
rename to providers/src/airflow/providers/ydb/utils/__init__.py
diff --git a/airflow/providers/ydb/utils/credentials.py b/providers/src/airflow/providers/ydb/utils/credentials.py
similarity index 100%
rename from airflow/providers/ydb/utils/credentials.py
rename to providers/src/airflow/providers/ydb/utils/credentials.py
diff --git a/airflow/providers/ydb/utils/defaults.py b/providers/src/airflow/providers/ydb/utils/defaults.py
similarity index 100%
rename from airflow/providers/ydb/utils/defaults.py
rename to providers/src/airflow/providers/ydb/utils/defaults.py
diff --git a/airflow/providers/zendesk/.latest-doc-only-change.txt b/providers/src/airflow/providers/zendesk/.latest-doc-only-change.txt
similarity index 100%
rename from airflow/providers/zendesk/.latest-doc-only-change.txt
rename to providers/src/airflow/providers/zendesk/.latest-doc-only-change.txt
diff --git a/airflow/providers/zendesk/CHANGELOG.rst b/providers/src/airflow/providers/zendesk/CHANGELOG.rst
similarity index 100%
rename from airflow/providers/zendesk/CHANGELOG.rst
rename to providers/src/airflow/providers/zendesk/CHANGELOG.rst
diff --git a/airflow/providers/zendesk/__init__.py b/providers/src/airflow/providers/zendesk/__init__.py
similarity index 100%
rename from airflow/providers/zendesk/__init__.py
rename to providers/src/airflow/providers/zendesk/__init__.py
diff --git a/airflow/providers/vertica/hooks/__init__.py b/providers/src/airflow/providers/zendesk/hooks/__init__.py
similarity index 100%
rename from airflow/providers/vertica/hooks/__init__.py
rename to providers/src/airflow/providers/zendesk/hooks/__init__.py
diff --git a/airflow/providers/zendesk/hooks/zendesk.py b/providers/src/airflow/providers/zendesk/hooks/zendesk.py
similarity index 100%
rename from airflow/providers/zendesk/hooks/zendesk.py
rename to providers/src/airflow/providers/zendesk/hooks/zendesk.py
diff --git a/airflow/providers/zendesk/provider.yaml b/providers/src/airflow/providers/zendesk/provider.yaml
similarity index 100%
rename from airflow/providers/zendesk/provider.yaml
rename to providers/src/airflow/providers/zendesk/provider.yaml
diff --git a/tests/_internals/__init__.py b/providers/tests/__init__.py
similarity index 100%
rename from tests/_internals/__init__.py
rename to providers/tests/__init__.py
diff --git a/tests/integration/providers/__init__.py b/providers/tests/airbyte/__init__.py
similarity index 100%
rename from tests/integration/providers/__init__.py
rename to providers/tests/airbyte/__init__.py
diff --git a/tests/integration/providers/apache/__init__.py b/providers/tests/airbyte/hooks/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/__init__.py
rename to providers/tests/airbyte/hooks/__init__.py
diff --git a/tests/providers/airbyte/hooks/test_airbyte.py b/providers/tests/airbyte/hooks/test_airbyte.py
similarity index 100%
rename from tests/providers/airbyte/hooks/test_airbyte.py
rename to providers/tests/airbyte/hooks/test_airbyte.py
diff --git a/tests/integration/providers/apache/cassandra/__init__.py b/providers/tests/airbyte/operators/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/cassandra/__init__.py
rename to providers/tests/airbyte/operators/__init__.py
diff --git a/tests/providers/airbyte/operators/test_airbyte.py b/providers/tests/airbyte/operators/test_airbyte.py
similarity index 100%
rename from tests/providers/airbyte/operators/test_airbyte.py
rename to providers/tests/airbyte/operators/test_airbyte.py
diff --git a/tests/integration/providers/apache/cassandra/hooks/__init__.py b/providers/tests/airbyte/sensors/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/cassandra/hooks/__init__.py
rename to providers/tests/airbyte/sensors/__init__.py
diff --git a/tests/providers/airbyte/sensors/test_airbyte.py b/providers/tests/airbyte/sensors/test_airbyte.py
similarity index 100%
rename from tests/providers/airbyte/sensors/test_airbyte.py
rename to providers/tests/airbyte/sensors/test_airbyte.py
diff --git a/tests/integration/providers/apache/drill/__init__.py b/providers/tests/airbyte/triggers/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/drill/__init__.py
rename to providers/tests/airbyte/triggers/__init__.py
diff --git a/tests/providers/airbyte/triggers/test_airbyte.py b/providers/tests/airbyte/triggers/test_airbyte.py
similarity index 100%
rename from tests/providers/airbyte/triggers/test_airbyte.py
rename to providers/tests/airbyte/triggers/test_airbyte.py
diff --git a/tests/integration/providers/apache/drill/hooks/__init__.py b/providers/tests/alibaba/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/drill/hooks/__init__.py
rename to providers/tests/alibaba/__init__.py
diff --git a/tests/integration/providers/apache/drill/operators/__init__.py b/providers/tests/alibaba/cloud/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/drill/operators/__init__.py
rename to providers/tests/alibaba/cloud/__init__.py
diff --git a/tests/integration/providers/apache/hive/__init__.py b/providers/tests/alibaba/cloud/hooks/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/hive/__init__.py
rename to providers/tests/alibaba/cloud/hooks/__init__.py
diff --git a/tests/providers/alibaba/cloud/hooks/test_analyticdb_spark.py b/providers/tests/alibaba/cloud/hooks/test_analyticdb_spark.py
similarity index 99%
rename from tests/providers/alibaba/cloud/hooks/test_analyticdb_spark.py
rename to providers/tests/alibaba/cloud/hooks/test_analyticdb_spark.py
index bf38a3f7ca666..b3e9100e2e59d 100644
--- a/tests/providers/alibaba/cloud/hooks/test_analyticdb_spark.py
+++ b/providers/tests/alibaba/cloud/hooks/test_analyticdb_spark.py
@@ -34,7 +34,8 @@
 )
 from airflow.providers.alibaba.cloud.hooks.analyticdb_spark import AnalyticDBSparkHook
-from tests.providers.alibaba.cloud.utils.analyticdb_spark_mock import mock_adb_spark_hook_default_project_id
+
+from providers.tests.alibaba.cloud.utils.analyticdb_spark_mock import mock_adb_spark_hook_default_project_id
 
 ADB_SPARK_STRING = "airflow.providers.alibaba.cloud.hooks.analyticdb_spark.{}"
 MOCK_ADB_SPARK_CONN_ID = "mock_id"
diff --git a/tests/providers/alibaba/cloud/hooks/test_oss.py b/providers/tests/alibaba/cloud/hooks/test_oss.py
similarity index 99%
rename from tests/providers/alibaba/cloud/hooks/test_oss.py
rename to providers/tests/alibaba/cloud/hooks/test_oss.py
index 1c47aa10c9741..2bb53dddece5c 100644
--- a/tests/providers/alibaba/cloud/hooks/test_oss.py
+++ b/providers/tests/alibaba/cloud/hooks/test_oss.py
@@ -20,7 +20,8 @@
 from unittest import mock
 
 from airflow.providers.alibaba.cloud.hooks.oss import OSSHook
-from tests.providers.alibaba.cloud.utils.oss_mock import mock_oss_hook_default_project_id
+
+from providers.tests.alibaba.cloud.utils.oss_mock import mock_oss_hook_default_project_id
 
 OSS_STRING = "airflow.providers.alibaba.cloud.hooks.oss.{}"
 MOCK_OSS_CONN_ID = "mock_id"
diff --git a/tests/integration/providers/apache/hive/transfers/__init__.py b/providers/tests/alibaba/cloud/log/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/hive/transfers/__init__.py
rename to providers/tests/alibaba/cloud/log/__init__.py
diff --git a/tests/providers/alibaba/cloud/log/test_oss_task_handler.py b/providers/tests/alibaba/cloud/log/test_oss_task_handler.py
similarity index 98%
rename from tests/providers/alibaba/cloud/log/test_oss_task_handler.py
rename to providers/tests/alibaba/cloud/log/test_oss_task_handler.py
index b17d953410117..18abe57aa09b7 100644
--- a/tests/providers/alibaba/cloud/log/test_oss_task_handler.py
+++ b/providers/tests/alibaba/cloud/log/test_oss_task_handler.py
@@ -26,8 +26,9 @@
 from airflow.providers.alibaba.cloud.log.oss_task_handler import OSSTaskHandler
 from airflow.utils.state import TaskInstanceState
 from airflow.utils.timezone import datetime
-from tests.test_utils.config import conf_vars
-from tests.test_utils.db import clear_db_dags, clear_db_runs
+
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs
 
 pytestmark = pytest.mark.db_test
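The three hunks above illustrate the two mechanical rewrites this patch applies across every test module: imports under `tests.providers.*` become `providers.tests.*`, and the shared helpers move from `tests.test_utils` to `dev.tests_common.test_utils`. A change of this breadth is normally scripted rather than edited by hand; a rough sketch of such a rewriter (a hypothetical helper, not the tool actually used for this patch) could look like:

    import re
    from pathlib import Path

    # Prefix rewrites mirroring the renames in this patch.
    REWRITES = [
        (re.compile(r"\btests\.providers\."), "providers.tests."),
        (re.compile(r"\btests\.test_utils\b"), "dev.tests_common.test_utils"),
    ]


    def rewrite_imports(root: Path) -> int:
        """Rewrite import prefixes in place; return the number of files changed."""
        changed = 0
        for path in root.rglob("*.py"):
            old = path.read_text(encoding="utf-8")
            new = old
            for pattern, replacement in REWRITES:
                new = pattern.sub(replacement, new)
            if new != old:
                path.write_text(new, encoding="utf-8")
                changed += 1
        return changed


    if __name__ == "__main__":
        print(rewrite_imports(Path("providers/tests")))

A plain textual rewrite like this only works because the prefixes are unambiguous; anything subtler would call for an AST-aware tool.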
diff --git a/tests/integration/providers/apache/kafka/__init__.py b/providers/tests/alibaba/cloud/operators/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/__init__.py
rename to providers/tests/alibaba/cloud/operators/__init__.py
diff --git a/tests/providers/alibaba/cloud/operators/test_analyticdb_spark.py b/providers/tests/alibaba/cloud/operators/test_analyticdb_spark.py
similarity index 100%
rename from tests/providers/alibaba/cloud/operators/test_analyticdb_spark.py
rename to providers/tests/alibaba/cloud/operators/test_analyticdb_spark.py
diff --git a/tests/providers/alibaba/cloud/operators/test_oss.py b/providers/tests/alibaba/cloud/operators/test_oss.py
similarity index 100%
rename from tests/providers/alibaba/cloud/operators/test_oss.py
rename to providers/tests/alibaba/cloud/operators/test_oss.py
diff --git a/tests/integration/providers/apache/kafka/hooks/__init__.py b/providers/tests/alibaba/cloud/sensors/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/hooks/__init__.py
rename to providers/tests/alibaba/cloud/sensors/__init__.py
diff --git a/tests/providers/alibaba/cloud/sensors/test_analyticdb_spark.py b/providers/tests/alibaba/cloud/sensors/test_analyticdb_spark.py
similarity index 100%
rename from tests/providers/alibaba/cloud/sensors/test_analyticdb_spark.py
rename to providers/tests/alibaba/cloud/sensors/test_analyticdb_spark.py
diff --git a/tests/providers/alibaba/cloud/sensors/test_oss_key.py b/providers/tests/alibaba/cloud/sensors/test_oss_key.py
similarity index 100%
rename from tests/providers/alibaba/cloud/sensors/test_oss_key.py
rename to providers/tests/alibaba/cloud/sensors/test_oss_key.py
diff --git a/tests/integration/providers/apache/kafka/operators/__init__.py b/providers/tests/alibaba/cloud/utils/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/operators/__init__.py
rename to providers/tests/alibaba/cloud/utils/__init__.py
diff --git a/tests/providers/alibaba/cloud/utils/analyticdb_spark_mock.py b/providers/tests/alibaba/cloud/utils/analyticdb_spark_mock.py
similarity index 100%
rename from tests/providers/alibaba/cloud/utils/analyticdb_spark_mock.py
rename to providers/tests/alibaba/cloud/utils/analyticdb_spark_mock.py
diff --git a/tests/providers/alibaba/cloud/utils/oss_mock.py b/providers/tests/alibaba/cloud/utils/oss_mock.py
similarity index 100%
rename from tests/providers/alibaba/cloud/utils/oss_mock.py
rename to providers/tests/alibaba/cloud/utils/oss_mock.py
diff --git a/tests/providers/alibaba/cloud/utils/test_utils.py b/providers/tests/alibaba/cloud/utils/test_utils.py
similarity index 100%
rename from tests/providers/alibaba/cloud/utils/test_utils.py
rename to providers/tests/alibaba/cloud/utils/test_utils.py
diff --git a/tests/integration/providers/apache/kafka/sensors/__init__.py b/providers/tests/amazon/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/sensors/__init__.py
rename to providers/tests/amazon/__init__.py
diff --git a/tests/providers/amazon/aws/.gitignore b/providers/tests/amazon/aws/.gitignore
similarity index 100%
rename from tests/providers/amazon/aws/.gitignore
rename to providers/tests/amazon/aws/.gitignore
diff --git a/tests/integration/providers/apache/kafka/triggers/__init__.py b/providers/tests/amazon/aws/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/triggers/__init__.py
rename to providers/tests/amazon/aws/__init__.py
diff --git a/tests/integration/providers/apache/pinot/__init__.py b/providers/tests/amazon/aws/assets/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/pinot/__init__.py
rename to providers/tests/amazon/aws/assets/__init__.py
diff --git a/tests/providers/amazon/aws/assets/test_s3.py b/providers/tests/amazon/aws/assets/test_s3.py
similarity index 100%
rename from tests/providers/amazon/aws/assets/test_s3.py
rename to providers/tests/amazon/aws/assets/test_s3.py
diff --git a/tests/integration/providers/apache/pinot/hooks/__init__.py b/providers/tests/amazon/aws/auth_manager/__init__.py
similarity index 100%
rename from tests/integration/providers/apache/pinot/hooks/__init__.py
rename to providers/tests/amazon/aws/auth_manager/__init__.py
diff --git a/tests/integration/providers/google/__init__.py b/providers/tests/amazon/aws/auth_manager/avp/__init__.py
similarity index 100%
rename from tests/integration/providers/google/__init__.py
rename to providers/tests/amazon/aws/auth_manager/avp/__init__.py
diff --git a/tests/providers/amazon/aws/auth_manager/avp/test_entities.py b/providers/tests/amazon/aws/auth_manager/avp/test_entities.py
similarity index 100%
rename from tests/providers/amazon/aws/auth_manager/avp/test_entities.py
rename to providers/tests/amazon/aws/auth_manager/avp/test_entities.py
diff --git a/tests/providers/amazon/aws/auth_manager/avp/test_facade.py b/providers/tests/amazon/aws/auth_manager/avp/test_facade.py
similarity index 98%
rename from tests/providers/amazon/aws/auth_manager/avp/test_facade.py
rename to providers/tests/amazon/aws/auth_manager/avp/test_facade.py
index 5c632ac1ba8e7..3d2a0195039df 100644
--- a/tests/providers/amazon/aws/auth_manager/avp/test_facade.py
+++ b/providers/tests/amazon/aws/auth_manager/avp/test_facade.py
@@ -27,7 +27,8 @@
 from airflow.providers.amazon.aws.auth_manager.avp.facade import AwsAuthManagerAmazonVerifiedPermissionsFacade
 from airflow.providers.amazon.aws.auth_manager.user import AwsAuthManagerUser
 from airflow.utils.helpers import prune_dict
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 if TYPE_CHECKING:
     from airflow.auth.managers.base_auth_manager import ResourceMethod
@@ -311,8 +312,8 @@ def test_get_batch_is_authorized_single_result_unsuccessful(self, facade):
             user=test_user,
         )
 
-    def test_is_policy_store_schema_up_to_date_when_schema_up_to_date(self, facade, airflow_root_path):
-        schema_path = airflow_root_path.joinpath(
+    def test_is_policy_store_schema_up_to_date_when_schema_up_to_date(self, facade, providers_src_folder):
+        schema_path = providers_src_folder.joinpath(
             "airflow", "providers", "amazon", "aws", "auth_manager", "avp", "schema.json"
         ).resolve()
         with open(schema_path) as schema_file:
@@ -322,8 +323,8 @@ def test_is_policy_store_schema_up_to_date_when_schema_up_to_date(self, facade,
 
         assert facade.is_policy_store_schema_up_to_date()
 
-    def test_is_policy_store_schema_up_to_date_when_schema_is_modified(self, facade, airflow_root_path):
-        schema_path = airflow_root_path.joinpath(
+    def test_is_policy_store_schema_up_to_date_when_schema_is_modified(self, facade, providers_src_folder):
+        schema_path = providers_src_folder.joinpath(
             "airflow", "providers", "amazon", "aws", "auth_manager", "avp", "schema.json"
         ).resolve()
         with open(schema_path) as schema_file:
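The fixture swap above, `airflow_root_path` to `providers_src_folder`, re-anchors the `schema.json` lookup at the new `providers/src` tree. The fixture's definition is not part of this excerpt; a minimal conftest sketch consistent with how the tests join path segments might be (hypothetical, assuming the fixture simply locates `providers/src` above the test file):

    # conftest.py -- illustrative sketch only; the real fixture lives in the
    # repository's shared pytest configuration, which this patch does not show.
    from pathlib import Path

    import pytest


    @pytest.fixture
    def providers_src_folder() -> Path:
        # Walk upwards until a providers/src directory appears, so tests can
        # resolve files such as airflow/providers/amazon/aws/auth_manager/avp/schema.json.
        for parent in Path(__file__).resolve().parents:
            candidate = parent / "providers" / "src"
            if candidate.is_dir():
                return candidate
        raise RuntimeError("could not locate providers/src above conftest.py")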
diff --git a/tests/integration/providers/google/cloud/__init__.py b/providers/tests/amazon/aws/auth_manager/cli/__init__.py
similarity index 100%
rename from tests/integration/providers/google/cloud/__init__.py
rename to providers/tests/amazon/aws/auth_manager/cli/__init__.py
diff --git a/tests/providers/amazon/aws/auth_manager/cli/test_avp_commands.py b/providers/tests/amazon/aws/auth_manager/cli/test_avp_commands.py
similarity index 97%
rename from tests/providers/amazon/aws/auth_manager/cli/test_avp_commands.py
rename to providers/tests/amazon/aws/auth_manager/cli/test_avp_commands.py
index f285beca8af1b..6122079fee4cb 100644
--- a/tests/providers/amazon/aws/auth_manager/cli/test_avp_commands.py
+++ b/providers/tests/amazon/aws/auth_manager/cli/test_avp_commands.py
@@ -23,8 +23,9 @@
 from airflow.cli import cli_parser
 from airflow.providers.amazon.aws.auth_manager.cli.avp_commands import init_avp, update_schema
-from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS
+from dev.tests_common.test_utils.config import conf_vars
 
 mock_boto3 = Mock()
diff --git a/tests/providers/amazon/aws/auth_manager/cli/test_definition.py b/providers/tests/amazon/aws/auth_manager/cli/test_definition.py
similarity index 100%
rename from tests/providers/amazon/aws/auth_manager/cli/test_definition.py
rename to providers/tests/amazon/aws/auth_manager/cli/test_definition.py
diff --git a/tests/integration/providers/google/cloud/transfers/__init__.py b/providers/tests/amazon/aws/auth_manager/security_manager/__init__.py
similarity index 100%
rename from tests/integration/providers/google/cloud/transfers/__init__.py
rename to providers/tests/amazon/aws/auth_manager/security_manager/__init__.py
diff --git a/tests/providers/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py b/providers/tests/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py
similarity index 96%
rename from tests/providers/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py
rename to providers/tests/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py
index ebb452fb1afb5..b6071aac955c6 100644
--- a/tests/providers/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py
+++ b/providers/tests/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py
@@ -21,7 +21,7 @@
 import pytest
 from flask import Flask
 
-from tests.test_utils.compat import ignore_provider_compatibility_error
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error
 
 python3_saml = pytest.importorskip("python3-saml")
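Both hunks above keep the version-gating helpers (`AIRFLOW_V_2_8_PLUS`, `ignore_provider_compatibility_error`) and only change where they are imported from. Such a flag is conventionally just a parsed-version comparison; an illustrative sketch of how a constant like this is typically defined (an assumption, not the relocated module's actual source):

    from packaging.version import Version

    from airflow import __version__ as AIRFLOW_VERSION

    # Compare against the base version so pre-release builds such as
    # "2.8.0.dev0" still count as >= 2.8.0.
    AIRFLOW_V_2_8_PLUS = Version(Version(AIRFLOW_VERSION).base_version) >= Version("2.8.0")
    AIRFLOW_V_2_9_PLUS = Version(Version(AIRFLOW_VERSION).base_version) >= Version("2.9.0")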
diff --git a/tests/providers/amazon/aws/auth_manager/test_aws_auth_manager.py b/providers/tests/amazon/aws/auth_manager/test_aws_auth_manager.py
similarity index 97%
rename from tests/providers/amazon/aws/auth_manager/test_aws_auth_manager.py
rename to providers/tests/amazon/aws/auth_manager/test_aws_auth_manager.py
index d827ba3ff0e6d..47e8a4cbcb26d 100644
--- a/tests/providers/amazon/aws/auth_manager/test_aws_auth_manager.py
+++ b/providers/tests/amazon/aws/auth_manager/test_aws_auth_manager.py
@@ -38,9 +38,10 @@
 )
 from airflow.www import app as application
 from airflow.www.extensions.init_appbuilder import init_appbuilder
-from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS, AIRFLOW_V_2_9_PLUS
-from tests.test_utils.config import conf_vars
-from tests.test_utils.www import check_content_in_response
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS, AIRFLOW_V_2_9_PLUS
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.www import check_content_in_response
 
 try:
     from airflow.auth.managers.models.resource_details import (
@@ -61,6 +62,7 @@
 else:
     raise
 
+
 if TYPE_CHECKING:
     from airflow.auth.managers.base_auth_manager import ResourceMethod
     from airflow.auth.managers.models.resource_details import AssetDetails
@@ -146,13 +148,17 @@ def client_admin():
             ("aws_auth_manager", "avp_policy_store_id"): "avp_policy_store_id",
         }
     ):
-        with patch(
-            "airflow.providers.amazon.aws.auth_manager.views.auth.OneLogin_Saml2_IdPMetadataParser"
-        ) as mock_parser, patch(
-            "airflow.providers.amazon.aws.auth_manager.views.auth.AwsAuthManagerAuthenticationViews._init_saml_auth"
-        ) as mock_init_saml_auth, patch(
-            "airflow.providers.amazon.aws.auth_manager.avp.facade.AwsAuthManagerAmazonVerifiedPermissionsFacade.is_policy_store_schema_up_to_date"
-        ) as mock_is_policy_store_schema_up_to_date:
+        with (
+            patch(
+                "airflow.providers.amazon.aws.auth_manager.views.auth.OneLogin_Saml2_IdPMetadataParser"
+            ) as mock_parser,
+            patch(
+                "airflow.providers.amazon.aws.auth_manager.views.auth.AwsAuthManagerAuthenticationViews._init_saml_auth"
+            ) as mock_init_saml_auth,
+            patch(
+                "airflow.providers.amazon.aws.auth_manager.avp.facade.AwsAuthManagerAmazonVerifiedPermissionsFacade.is_policy_store_schema_up_to_date"
+            ) as mock_is_policy_store_schema_up_to_date,
+        ):
             mock_parser.parse_remote.return_value = SAML_METADATA_PARSED
             mock_is_policy_store_schema_up_to_date.return_value = True
diff --git a/tests/providers/amazon/aws/auth_manager/test_constants.py b/providers/tests/amazon/aws/auth_manager/test_constants.py
similarity index 100%
rename from tests/providers/amazon/aws/auth_manager/test_constants.py
rename to providers/tests/amazon/aws/auth_manager/test_constants.py
diff --git a/tests/providers/amazon/aws/auth_manager/test_user.py b/providers/tests/amazon/aws/auth_manager/test_user.py
similarity index 100%
rename from tests/providers/amazon/aws/auth_manager/test_user.py
rename to providers/tests/amazon/aws/auth_manager/test_user.py
diff --git a/tests/integration/providers/microsoft/__init__.py b/providers/tests/amazon/aws/auth_manager/views/__init__.py
similarity index 100%
rename from tests/integration/providers/microsoft/__init__.py
rename to providers/tests/amazon/aws/auth_manager/views/__init__.py
diff --git a/tests/providers/amazon/aws/auth_manager/views/test_auth.py b/providers/tests/amazon/aws/auth_manager/views/test_auth.py
similarity index 80%
rename from tests/providers/amazon/aws/auth_manager/views/test_auth.py
rename to providers/tests/amazon/aws/auth_manager/views/test_auth.py
index 05d2fb84b51cf..9b2eec69188be 100644
--- a/tests/providers/amazon/aws/auth_manager/views/test_auth.py
+++ b/providers/tests/amazon/aws/auth_manager/views/test_auth.py
@@ -23,8 +23,9 @@
 from airflow.exceptions import AirflowException
 from airflow.www import app as application
-from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = [
     pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Test requires Airflow 2.9+"),
@@ -62,11 +63,14 @@ def aws_app():
             ("aws_auth_manager", "saml_metadata_url"): SAML_METADATA_URL,
         }
     ):
-        with patch(
-            "airflow.providers.amazon.aws.auth_manager.views.auth.OneLogin_Saml2_IdPMetadataParser"
-        ) as mock_parser, patch(
-            "airflow.providers.amazon.aws.auth_manager.avp.facade.AwsAuthManagerAmazonVerifiedPermissionsFacade.is_policy_store_schema_up_to_date"
-        ) as mock_is_policy_store_schema_up_to_date:
+        with (
+            patch(
+                "airflow.providers.amazon.aws.auth_manager.views.auth.OneLogin_Saml2_IdPMetadataParser"
+            ) as mock_parser,
+            patch(
+                "airflow.providers.amazon.aws.auth_manager.avp.facade.AwsAuthManagerAmazonVerifiedPermissionsFacade.is_policy_store_schema_up_to_date"
+            ) as mock_is_policy_store_schema_up_to_date,
+        ):
             mock_is_policy_store_schema_up_to_date.return_value = True
             mock_parser.parse_remote.return_value = SAML_METADATA_PARSED
             return application.create_app(testing=True, config={"WTF_CSRF_ENABLED": False})
@@ -102,13 +106,17 @@ def test_login_callback_set_user_in_session(self):
             ("aws_auth_manager", "saml_metadata_url"): SAML_METADATA_URL,
         }
     ):
-        with patch(
-            "airflow.providers.amazon.aws.auth_manager.views.auth.OneLogin_Saml2_IdPMetadataParser"
-        ) as mock_parser, patch(
-            "airflow.providers.amazon.aws.auth_manager.views.auth.AwsAuthManagerAuthenticationViews._init_saml_auth"
-        ) as mock_init_saml_auth, patch(
-            "airflow.providers.amazon.aws.auth_manager.avp.facade.AwsAuthManagerAmazonVerifiedPermissionsFacade.is_policy_store_schema_up_to_date"
-        ) as mock_is_policy_store_schema_up_to_date:
+        with (
+            patch(
+                "airflow.providers.amazon.aws.auth_manager.views.auth.OneLogin_Saml2_IdPMetadataParser"
+            ) as mock_parser,
+            patch(
+                "airflow.providers.amazon.aws.auth_manager.views.auth.AwsAuthManagerAuthenticationViews._init_saml_auth"
+            ) as mock_init_saml_auth,
+            patch(
+                "airflow.providers.amazon.aws.auth_manager.avp.facade.AwsAuthManagerAmazonVerifiedPermissionsFacade.is_policy_store_schema_up_to_date"
+            ) as mock_is_policy_store_schema_up_to_date,
+        ):
             mock_is_policy_store_schema_up_to_date.return_value = True
             mock_parser.parse_remote.return_value = SAML_METADATA_PARSED
 
@@ -140,13 +148,17 @@ def test_login_callback_raise_exception_if_errors(self):
             ("aws_auth_manager", "saml_metadata_url"): SAML_METADATA_URL,
         }
     ):
-        with patch(
-            "airflow.providers.amazon.aws.auth_manager.views.auth.OneLogin_Saml2_IdPMetadataParser"
-        ) as mock_parser, patch(
-            "airflow.providers.amazon.aws.auth_manager.views.auth.AwsAuthManagerAuthenticationViews._init_saml_auth"
-        ) as mock_init_saml_auth, patch(
-            "airflow.providers.amazon.aws.auth_manager.avp.facade.AwsAuthManagerAmazonVerifiedPermissionsFacade.is_policy_store_schema_up_to_date"
-        ) as mock_is_policy_store_schema_up_to_date:
+        with (
+            patch(
+                "airflow.providers.amazon.aws.auth_manager.views.auth.OneLogin_Saml2_IdPMetadataParser"
+            ) as mock_parser,
+            patch(
+                "airflow.providers.amazon.aws.auth_manager.views.auth.AwsAuthManagerAuthenticationViews._init_saml_auth"
+            ) as mock_init_saml_auth,
+            patch(
+                "airflow.providers.amazon.aws.auth_manager.avp.facade.AwsAuthManagerAmazonVerifiedPermissionsFacade.is_policy_store_schema_up_to_date"
+            ) as mock_is_policy_store_schema_up_to_date,
+        ):
             mock_is_policy_store_schema_up_to_date.return_value = True
             mock_parser.parse_remote.return_value = SAML_METADATA_PARSED
diff --git a/tests/integration/providers/microsoft/mssql/__init__.py b/providers/tests/amazon/aws/config_templates/__init__.py
similarity index 100%
rename from tests/integration/providers/microsoft/mssql/__init__.py
rename to providers/tests/amazon/aws/config_templates/__init__.py
diff --git a/tests/providers/amazon/aws/config_templates/args.json b/providers/tests/amazon/aws/config_templates/args.json
similarity index 100%
rename from tests/providers/amazon/aws/config_templates/args.json
rename to providers/tests/amazon/aws/config_templates/args.json
diff --git a/tests/providers/amazon/aws/config_templates/job.j2.json b/providers/tests/amazon/aws/config_templates/job.j2.json
similarity index 100%
rename from tests/providers/amazon/aws/config_templates/job.j2.json
rename to providers/tests/amazon/aws/config_templates/job.j2.json
diff --git a/tests/providers/amazon/aws/config_templates/steps.j2.json b/providers/tests/amazon/aws/config_templates/steps.j2.json
similarity index 100%
rename from tests/providers/amazon/aws/config_templates/steps.j2.json
rename to providers/tests/amazon/aws/config_templates/steps.j2.json
diff --git a/tests/providers/amazon/aws/config_templates/steps.json b/providers/tests/amazon/aws/config_templates/steps.json
similarity index 100%
rename from tests/providers/amazon/aws/config_templates/steps.json
rename to providers/tests/amazon/aws/config_templates/steps.json
diff --git a/tests/integration/providers/microsoft/mssql/hooks/__init__.py b/providers/tests/amazon/aws/deferrable/__init__.py
similarity index 100%
rename from tests/integration/providers/microsoft/mssql/hooks/__init__.py
rename to providers/tests/amazon/aws/deferrable/__init__.py
diff --git a/tests/integration/providers/mongo/__init__.py b/providers/tests/amazon/aws/deferrable/hooks/__init__.py
similarity index 100%
rename from tests/integration/providers/mongo/__init__.py
rename to providers/tests/amazon/aws/deferrable/hooks/__init__.py
diff --git a/tests/integration/providers/mongo/sensors/__init__.py b/providers/tests/amazon/aws/executors/__init__.py
similarity index 100%
rename from tests/integration/providers/mongo/sensors/__init__.py
rename to providers/tests/amazon/aws/executors/__init__.py
diff --git a/tests/integration/providers/openlineage/__init__.py b/providers/tests/amazon/aws/executors/batch/__init__.py
similarity index 100%
rename from tests/integration/providers/openlineage/__init__.py
rename to providers/tests/amazon/aws/executors/batch/__init__.py
diff --git a/tests/providers/amazon/aws/executors/batch/test_batch_executor.py b/providers/tests/amazon/aws/executors/batch/test_batch_executor.py
similarity index 99%
rename from tests/providers/amazon/aws/executors/batch/test_batch_executor.py
rename to providers/tests/amazon/aws/executors/batch/test_batch_executor.py
index 8b81e4a966a45..e7037bd16c85e 100644
--- a/tests/providers/amazon/aws/executors/batch/test_batch_executor.py
+++ b/providers/tests/amazon/aws/executors/batch/test_batch_executor.py
@@ -45,8 +45,9 @@
 from airflow.utils.helpers import convert_camel_to_snake
 from airflow.utils.state import State
 from airflow.version import version as airflow_version_str
-from tests.conftest import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES
+from dev.tests_common.test_utils.config import conf_vars
 
 airflow_version = VersionInfo(*map(int, airflow_version_str.split(".")[:3]))
 
 ARN1 = "arn1"
@@ -652,9 +653,9 @@ def teardown_method(self) -> None:
     )
     def test_validate_config_defaults(self):
         """Assert that the defaults stated in the config.yml file match those in utils.CONFIG_DEFAULTS."""
-        curr_dir = os.path.dirname(os.path.abspath(__file__))
-        executor_path = "aws/executors/batch"
-        config_filename = curr_dir.replace("tests", "airflow").replace(executor_path, "provider.yaml")
+        from airflow.providers.amazon import __file__ as provider_path
+
+        config_filename = os.path.join(os.path.dirname(provider_path), "provider.yaml")
 
         with open(config_filename) as config:
             options = yaml.safe_load(config)["config"][CONFIG_GROUP_NAME]["options"]
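The `test_validate_config_defaults` hunk above illustrates the path strategy this whole move depends on: stop deriving file locations from the repository layout (string-rewriting the test's own path) and instead anchor them to the module that ships the file. A sketch of the pattern, assuming the `apache-airflow-providers-amazon` package and PyYAML are installed:

```python
# Locate a data file that ships inside an installed package by using the
# package's own __file__ rather than rewriting the test's path, so the
# lookup survives the tests being moved to a different directory tree.
import os

import yaml  # PyYAML

from airflow.providers.amazon import __file__ as provider_init

config_filename = os.path.join(os.path.dirname(provider_init), "provider.yaml")
with open(config_filename) as config:
    provider_config = yaml.safe_load(config)["config"]  # per the hunk above

# On Python 3.9+, importlib.resources.files("airflow.providers.amazon")
# / "provider.yaml" reaches the same file without touching __file__.
# The same idea keeps fixtures next to the tests reachable after a move,
# e.g. Path(__file__).parents[1] / "config_templates" in later hunks.
```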
diff --git a/tests/integration/providers/openlineage/operators/__init__.py b/providers/tests/amazon/aws/executors/ecs/__init__.py
similarity index 100%
rename from tests/integration/providers/openlineage/operators/__init__.py
rename to providers/tests/amazon/aws/executors/ecs/__init__.py
diff --git a/tests/providers/amazon/aws/executors/ecs/test_ecs_executor.py b/providers/tests/amazon/aws/executors/ecs/test_ecs_executor.py
similarity index 99%
rename from tests/providers/amazon/aws/executors/ecs/test_ecs_executor.py
rename to providers/tests/amazon/aws/executors/ecs/test_ecs_executor.py
index 51e3c17934900..50cdb580382f3 100644
--- a/tests/providers/amazon/aws/executors/ecs/test_ecs_executor.py
+++ b/providers/tests/amazon/aws/executors/ecs/test_ecs_executor.py
@@ -56,9 +56,10 @@
 from airflow.utils.state import State, TaskInstanceState
 from airflow.utils.timezone import utcnow
 from airflow.version import version as airflow_version_str
-from tests.conftest import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES
-from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = pytest.mark.db_test
 
@@ -1219,9 +1220,9 @@ def test_validate_config_defaults(self):
         airflow sources, not when airflow is installed from packages, because airflow installed
         from packages will not have the provider.yml file.
         """
-        curr_dir = os.path.dirname(os.path.abspath(__file__))
-        executor_path = "aws/executors/ecs"
-        config_filename = curr_dir.replace("tests", "airflow").replace(executor_path, "provider.yaml")
+        from airflow.providers.amazon import __file__ as provider_path
+
+        config_filename = os.path.join(os.path.dirname(provider_path), "provider.yaml")
 
         with open(config_filename) as config:
             options = yaml.safe_load(config)["config"][CONFIG_GROUP_NAME]["options"]
diff --git a/tests/integration/providers/qdrant/__init__.py b/providers/tests/amazon/aws/executors/utils/__init__.py
similarity index 100%
rename from tests/integration/providers/qdrant/__init__.py
rename to providers/tests/amazon/aws/executors/utils/__init__.py
diff --git a/tests/providers/amazon/aws/executors/utils/test_exponential_backoff_retry.py b/providers/tests/amazon/aws/executors/utils/test_exponential_backoff_retry.py
similarity index 100%
rename from tests/providers/amazon/aws/executors/utils/test_exponential_backoff_retry.py
rename to providers/tests/amazon/aws/executors/utils/test_exponential_backoff_retry.py
diff --git a/tests/integration/providers/qdrant/hooks/__init__.py b/providers/tests/amazon/aws/fs/__init__.py
similarity index 100%
rename from tests/integration/providers/qdrant/hooks/__init__.py
rename to providers/tests/amazon/aws/fs/__init__.py
diff --git a/tests/providers/amazon/aws/fs/test_s3.py b/providers/tests/amazon/aws/fs/test_s3.py
similarity index 100%
rename from tests/providers/amazon/aws/fs/test_s3.py
rename to providers/tests/amazon/aws/fs/test_s3.py
diff --git a/tests/integration/providers/qdrant/operators/__init__.py b/providers/tests/amazon/aws/hooks/__init__.py
similarity index 100%
rename from tests/integration/providers/qdrant/operators/__init__.py
rename to providers/tests/amazon/aws/hooks/__init__.py
diff
--git a/tests/providers/amazon/aws/hooks/test_appflow.py b/providers/tests/amazon/aws/hooks/test_appflow.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_appflow.py rename to providers/tests/amazon/aws/hooks/test_appflow.py diff --git a/tests/providers/amazon/aws/hooks/test_athena.py b/providers/tests/amazon/aws/hooks/test_athena.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_athena.py rename to providers/tests/amazon/aws/hooks/test_athena.py diff --git a/tests/providers/amazon/aws/hooks/test_athena_sql.py b/providers/tests/amazon/aws/hooks/test_athena_sql.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_athena_sql.py rename to providers/tests/amazon/aws/hooks/test_athena_sql.py diff --git a/tests/providers/amazon/aws/hooks/test_base_aws.py b/providers/tests/amazon/aws/hooks/test_base_aws.py similarity index 98% rename from tests/providers/amazon/aws/hooks/test_base_aws.py rename to providers/tests/amazon/aws/hooks/test_base_aws.py index 0957e6a928aae..c58993d748f2d 100644 --- a/tests/providers/amazon/aws/hooks/test_base_aws.py +++ b/providers/tests/amazon/aws/hooks/test_base_aws.py @@ -49,7 +49,8 @@ resolve_session_factory, ) from airflow.providers.amazon.aws.utils.connection_wrapper import AwsConnectionWrapper -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytest.importorskip("aiobotocore") @@ -140,7 +141,7 @@ def mock_conn(request): class TestSessionFactory: @conf_vars( - {("aws", "session_factory"): "tests.providers.amazon.aws.hooks.test_base_aws.CustomSessionFactory"} + {("aws", "session_factory"): "providers.tests.amazon.aws.hooks.test_base_aws.CustomSessionFactory"} ) def test_resolve_session_factory_class(self): cls = resolve_session_factory() @@ -545,13 +546,13 @@ def import_mock(name, *args): return mock_id_token_credentials return orig_import(name, *args) - with mock.patch("builtins.__import__", side_effect=import_mock), mock.patch.dict( - "os.environ", AIRFLOW_CONN_AWS_DEFAULT=mock_connection.get_uri() - ), mock.patch("airflow.providers.amazon.aws.hooks.base_aws.boto3") as mock_boto3, mock.patch( - "airflow.providers.amazon.aws.hooks.base_aws.botocore" - ) as mock_botocore, mock.patch( - "airflow.providers.amazon.aws.hooks.base_aws.botocore.session" - ) as mock_session: + with ( + mock.patch("builtins.__import__", side_effect=import_mock), + mock.patch.dict("os.environ", AIRFLOW_CONN_AWS_DEFAULT=mock_connection.get_uri()), + mock.patch("airflow.providers.amazon.aws.hooks.base_aws.boto3") as mock_boto3, + mock.patch("airflow.providers.amazon.aws.hooks.base_aws.botocore") as mock_botocore, + mock.patch("airflow.providers.amazon.aws.hooks.base_aws.botocore.session") as mock_session, + ): hook = AwsBaseHook(aws_conn_id="aws_default", client_type="airflow_test") credentials_from_hook = hook.get_credentials() @@ -701,12 +702,14 @@ def mock_assume_role_with_saml(**kwargs): } return sts_response - with mock.patch("builtins.__import__", side_effect=import_mock), mock.patch( - "airflow.providers.amazon.aws.hooks.base_aws.requests.Session.get" - ) as mock_get, mock.patch( - "airflow.providers.amazon.aws.hooks.base_aws.BaseSessionFactory._create_basic_session", - spec=boto3.session.Session, - ) as mocked_basic_session: + with ( + mock.patch("builtins.__import__", side_effect=import_mock), + mock.patch("airflow.providers.amazon.aws.hooks.base_aws.requests.Session.get") as mock_get, + mock.patch( + 
"airflow.providers.amazon.aws.hooks.base_aws.BaseSessionFactory._create_basic_session", + spec=boto3.session.Session, + ) as mocked_basic_session, + ): mocked_basic_session.return_value.region_name = "us-east-2" mock_client = mocked_basic_session.return_value.client mock_client.return_value.assume_role_with_saml.side_effect = mock_assume_role_with_saml diff --git a/tests/providers/amazon/aws/hooks/test_batch_client.py b/providers/tests/amazon/aws/hooks/test_batch_client.py similarity index 98% rename from tests/providers/amazon/aws/hooks/test_batch_client.py rename to providers/tests/amazon/aws/hooks/test_batch_client.py index 83385878b57fe..98b89de0a60b4 100644 --- a/tests/providers/amazon/aws/hooks/test_batch_client.py +++ b/providers/tests/amazon/aws/hooks/test_batch_client.py @@ -126,13 +126,11 @@ def test_wait_for_job_with_logs(self): thread_stop = mock.Mock(side_effect=lambda: time.sleep(2)) thread_join = mock.Mock(side_effect=lambda: time.sleep(2)) - with mock.patch.object( - batch_log_fetcher, "start", thread_start - ) as mock_fetcher_start, mock.patch.object( - batch_log_fetcher, "stop", thread_stop - ) as mock_fetcher_stop, mock.patch.object( - batch_log_fetcher, "join", thread_join - ) as mock_fetcher_join: + with ( + mock.patch.object(batch_log_fetcher, "start", thread_start) as mock_fetcher_start, + mock.patch.object(batch_log_fetcher, "stop", thread_stop) as mock_fetcher_stop, + mock.patch.object(batch_log_fetcher, "join", thread_join) as mock_fetcher_join, + ): self.batch_client.wait_for_job(JOB_ID, get_batch_log_fetcher=mock_get_batch_log_fetcher) mock_get_batch_log_fetcher.assert_called_with(JOB_ID) mock_fetcher_start.assert_called_once() diff --git a/tests/providers/amazon/aws/hooks/test_batch_waiters.py b/providers/tests/amazon/aws/hooks/test_batch_waiters.py similarity index 97% rename from tests/providers/amazon/aws/hooks/test_batch_waiters.py rename to providers/tests/amazon/aws/hooks/test_batch_waiters.py index 72f2061b902ca..d67b25d07f2a6 100644 --- a/tests/providers/amazon/aws/hooks/test_batch_waiters.py +++ b/providers/tests/amazon/aws/hooks/test_batch_waiters.py @@ -157,13 +157,12 @@ def test_wait_for_job_with_cloudwatch_logs(self): thread_stop = mock.Mock(side_effect=lambda: time.sleep(2)) thread_join = mock.Mock(side_effect=lambda: time.sleep(2)) - with mock.patch.object(self.batch_waiters, "get_waiter") as mock_get_waiter, mock.patch.object( - batch_log_fetcher, "start", thread_start - ) as mock_fetcher_start, mock.patch.object( - batch_log_fetcher, "stop", thread_stop - ) as mock_fetcher_stop, mock.patch.object( - batch_log_fetcher, "join", thread_join - ) as mock_fetcher_join: + with ( + mock.patch.object(self.batch_waiters, "get_waiter") as mock_get_waiter, + mock.patch.object(batch_log_fetcher, "start", thread_start) as mock_fetcher_start, + mock.patch.object(batch_log_fetcher, "stop", thread_stop) as mock_fetcher_stop, + mock.patch.object(batch_log_fetcher, "join", thread_join) as mock_fetcher_join, + ): # Run the wait_for_job method self.batch_waiters.wait_for_job(self.job_id, get_batch_log_fetcher=mock_get_batch_log_fetcher) diff --git a/tests/providers/amazon/aws/hooks/test_bedrock.py b/providers/tests/amazon/aws/hooks/test_bedrock.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_bedrock.py rename to providers/tests/amazon/aws/hooks/test_bedrock.py diff --git a/tests/providers/amazon/aws/hooks/test_chime.py b/providers/tests/amazon/aws/hooks/test_chime.py similarity index 100% rename from 
tests/providers/amazon/aws/hooks/test_chime.py rename to providers/tests/amazon/aws/hooks/test_chime.py diff --git a/tests/providers/amazon/aws/hooks/test_cloud_formation.py b/providers/tests/amazon/aws/hooks/test_cloud_formation.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_cloud_formation.py rename to providers/tests/amazon/aws/hooks/test_cloud_formation.py diff --git a/tests/providers/amazon/aws/hooks/test_comprehend.py b/providers/tests/amazon/aws/hooks/test_comprehend.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_comprehend.py rename to providers/tests/amazon/aws/hooks/test_comprehend.py diff --git a/tests/providers/amazon/aws/hooks/test_datasync.py b/providers/tests/amazon/aws/hooks/test_datasync.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_datasync.py rename to providers/tests/amazon/aws/hooks/test_datasync.py diff --git a/tests/providers/amazon/aws/hooks/test_dms.py b/providers/tests/amazon/aws/hooks/test_dms.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_dms.py rename to providers/tests/amazon/aws/hooks/test_dms.py diff --git a/tests/providers/amazon/aws/hooks/test_dynamodb.py b/providers/tests/amazon/aws/hooks/test_dynamodb.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_dynamodb.py rename to providers/tests/amazon/aws/hooks/test_dynamodb.py diff --git a/tests/providers/amazon/aws/hooks/test_ec2.py b/providers/tests/amazon/aws/hooks/test_ec2.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_ec2.py rename to providers/tests/amazon/aws/hooks/test_ec2.py diff --git a/tests/providers/amazon/aws/hooks/test_ecr.py b/providers/tests/amazon/aws/hooks/test_ecr.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_ecr.py rename to providers/tests/amazon/aws/hooks/test_ecr.py diff --git a/tests/providers/amazon/aws/hooks/test_ecs.py b/providers/tests/amazon/aws/hooks/test_ecs.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_ecs.py rename to providers/tests/amazon/aws/hooks/test_ecs.py diff --git a/tests/providers/amazon/aws/hooks/test_eks.py b/providers/tests/amazon/aws/hooks/test_eks.py similarity index 99% rename from tests/providers/amazon/aws/hooks/test_eks.py rename to providers/tests/amazon/aws/hooks/test_eks.py index ae0f0e19949fa..cf3638d30d37d 100644 --- a/tests/providers/amazon/aws/hooks/test_eks.py +++ b/providers/tests/amazon/aws/hooks/test_eks.py @@ -53,7 +53,8 @@ ) from airflow.providers.amazon.aws.hooks.eks import COMMAND, EksHook -from tests.providers.amazon.aws.utils.eks_test_constants import ( + +from providers.tests.amazon.aws.utils.eks_test_constants import ( DEFAULT_CONN_ID, DEFAULT_NAMESPACE, DISK_SIZE, @@ -82,7 +83,7 @@ RegExTemplates, ResponseAttributes, ) -from tests.providers.amazon.aws.utils.eks_test_utils import ( +from providers.tests.amazon.aws.utils.eks_test_utils import ( attributes_to_test, generate_clusters, generate_dict, diff --git a/tests/providers/amazon/aws/hooks/test_elasticache_replication_group.py b/providers/tests/amazon/aws/hooks/test_elasticache_replication_group.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_elasticache_replication_group.py rename to providers/tests/amazon/aws/hooks/test_elasticache_replication_group.py diff --git a/tests/providers/amazon/aws/hooks/test_emr.py b/providers/tests/amazon/aws/hooks/test_emr.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_emr.py rename 
to providers/tests/amazon/aws/hooks/test_emr.py diff --git a/tests/providers/amazon/aws/hooks/test_emr_containers.py b/providers/tests/amazon/aws/hooks/test_emr_containers.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_emr_containers.py rename to providers/tests/amazon/aws/hooks/test_emr_containers.py diff --git a/tests/providers/amazon/aws/hooks/test_emr_serverless.py b/providers/tests/amazon/aws/hooks/test_emr_serverless.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_emr_serverless.py rename to providers/tests/amazon/aws/hooks/test_emr_serverless.py diff --git a/tests/providers/amazon/aws/hooks/test_eventbridge.py b/providers/tests/amazon/aws/hooks/test_eventbridge.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_eventbridge.py rename to providers/tests/amazon/aws/hooks/test_eventbridge.py diff --git a/tests/providers/amazon/aws/hooks/test_glacier.py b/providers/tests/amazon/aws/hooks/test_glacier.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_glacier.py rename to providers/tests/amazon/aws/hooks/test_glacier.py diff --git a/tests/providers/amazon/aws/hooks/test_glue.py b/providers/tests/amazon/aws/hooks/test_glue.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_glue.py rename to providers/tests/amazon/aws/hooks/test_glue.py diff --git a/tests/providers/amazon/aws/hooks/test_glue_catalog.py b/providers/tests/amazon/aws/hooks/test_glue_catalog.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_glue_catalog.py rename to providers/tests/amazon/aws/hooks/test_glue_catalog.py diff --git a/tests/providers/amazon/aws/hooks/test_glue_crawler.py b/providers/tests/amazon/aws/hooks/test_glue_crawler.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_glue_crawler.py rename to providers/tests/amazon/aws/hooks/test_glue_crawler.py diff --git a/tests/providers/amazon/aws/hooks/test_glue_databrew.py b/providers/tests/amazon/aws/hooks/test_glue_databrew.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_glue_databrew.py rename to providers/tests/amazon/aws/hooks/test_glue_databrew.py diff --git a/tests/providers/amazon/aws/hooks/test_hooks_signature.py b/providers/tests/amazon/aws/hooks/test_hooks_signature.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_hooks_signature.py rename to providers/tests/amazon/aws/hooks/test_hooks_signature.py diff --git a/tests/providers/amazon/aws/hooks/test_kinesis.py b/providers/tests/amazon/aws/hooks/test_kinesis.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_kinesis.py rename to providers/tests/amazon/aws/hooks/test_kinesis.py diff --git a/tests/providers/amazon/aws/hooks/test_kinesis_analytics.py b/providers/tests/amazon/aws/hooks/test_kinesis_analytics.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_kinesis_analytics.py rename to providers/tests/amazon/aws/hooks/test_kinesis_analytics.py diff --git a/tests/providers/amazon/aws/hooks/test_lambda_function.py b/providers/tests/amazon/aws/hooks/test_lambda_function.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_lambda_function.py rename to providers/tests/amazon/aws/hooks/test_lambda_function.py diff --git a/tests/providers/amazon/aws/hooks/test_logs.py b/providers/tests/amazon/aws/hooks/test_logs.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_logs.py rename to 
providers/tests/amazon/aws/hooks/test_logs.py diff --git a/tests/providers/amazon/aws/hooks/test_neptune.py b/providers/tests/amazon/aws/hooks/test_neptune.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_neptune.py rename to providers/tests/amazon/aws/hooks/test_neptune.py diff --git a/tests/providers/amazon/aws/hooks/test_opensearch_serverless.py b/providers/tests/amazon/aws/hooks/test_opensearch_serverless.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_opensearch_serverless.py rename to providers/tests/amazon/aws/hooks/test_opensearch_serverless.py diff --git a/tests/providers/amazon/aws/hooks/test_quicksight.py b/providers/tests/amazon/aws/hooks/test_quicksight.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_quicksight.py rename to providers/tests/amazon/aws/hooks/test_quicksight.py diff --git a/tests/providers/amazon/aws/hooks/test_rds.py b/providers/tests/amazon/aws/hooks/test_rds.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_rds.py rename to providers/tests/amazon/aws/hooks/test_rds.py diff --git a/tests/providers/amazon/aws/hooks/test_redshift_cluster.py b/providers/tests/amazon/aws/hooks/test_redshift_cluster.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_redshift_cluster.py rename to providers/tests/amazon/aws/hooks/test_redshift_cluster.py diff --git a/tests/providers/amazon/aws/hooks/test_redshift_data.py b/providers/tests/amazon/aws/hooks/test_redshift_data.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_redshift_data.py rename to providers/tests/amazon/aws/hooks/test_redshift_data.py diff --git a/tests/providers/amazon/aws/hooks/test_redshift_sql.py b/providers/tests/amazon/aws/hooks/test_redshift_sql.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_redshift_sql.py rename to providers/tests/amazon/aws/hooks/test_redshift_sql.py diff --git a/tests/providers/amazon/aws/hooks/test_s3.py b/providers/tests/amazon/aws/hooks/test_s3.py similarity index 99% rename from tests/providers/amazon/aws/hooks/test_s3.py rename to providers/tests/amazon/aws/hooks/test_s3.py index 43c4b94445b6f..2e9a010006bab 100644 --- a/tests/providers/amazon/aws/hooks/test_s3.py +++ b/providers/tests/amazon/aws/hooks/test_s3.py @@ -42,7 +42,8 @@ unify_bucket_name_and_key, ) from airflow.utils.timezone import datetime -from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS @pytest.fixture diff --git a/tests/providers/amazon/aws/hooks/test_sagemaker.py b/providers/tests/amazon/aws/hooks/test_sagemaker.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_sagemaker.py rename to providers/tests/amazon/aws/hooks/test_sagemaker.py diff --git a/tests/providers/amazon/aws/hooks/test_secrets_manager.py b/providers/tests/amazon/aws/hooks/test_secrets_manager.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_secrets_manager.py rename to providers/tests/amazon/aws/hooks/test_secrets_manager.py diff --git a/tests/providers/amazon/aws/hooks/test_ses.py b/providers/tests/amazon/aws/hooks/test_ses.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_ses.py rename to providers/tests/amazon/aws/hooks/test_ses.py diff --git a/tests/providers/amazon/aws/hooks/test_sns.py b/providers/tests/amazon/aws/hooks/test_sns.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_sns.py rename 
to providers/tests/amazon/aws/hooks/test_sns.py diff --git a/tests/providers/amazon/aws/hooks/test_sqs.py b/providers/tests/amazon/aws/hooks/test_sqs.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_sqs.py rename to providers/tests/amazon/aws/hooks/test_sqs.py diff --git a/tests/providers/amazon/aws/hooks/test_ssm.py b/providers/tests/amazon/aws/hooks/test_ssm.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_ssm.py rename to providers/tests/amazon/aws/hooks/test_ssm.py diff --git a/tests/providers/amazon/aws/hooks/test_step_function.py b/providers/tests/amazon/aws/hooks/test_step_function.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_step_function.py rename to providers/tests/amazon/aws/hooks/test_step_function.py diff --git a/tests/providers/amazon/aws/hooks/test_sts.py b/providers/tests/amazon/aws/hooks/test_sts.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_sts.py rename to providers/tests/amazon/aws/hooks/test_sts.py diff --git a/tests/providers/amazon/aws/hooks/test_verified_permissions.py b/providers/tests/amazon/aws/hooks/test_verified_permissions.py similarity index 100% rename from tests/providers/amazon/aws/hooks/test_verified_permissions.py rename to providers/tests/amazon/aws/hooks/test_verified_permissions.py diff --git a/tests/integration/providers/redis/__init__.py b/providers/tests/amazon/aws/infrastructure/__init__.py similarity index 100% rename from tests/integration/providers/redis/__init__.py rename to providers/tests/amazon/aws/infrastructure/__init__.py diff --git a/tests/integration/providers/redis/hooks/__init__.py b/providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/__init__.py similarity index 100% rename from tests/integration/providers/redis/hooks/__init__.py rename to providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/__init__.py diff --git a/tests/providers/amazon/aws/infrastructure/example_s3_to_redshift/outputs.tf b/providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/outputs.tf similarity index 100% rename from tests/providers/amazon/aws/infrastructure/example_s3_to_redshift/outputs.tf rename to providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/outputs.tf diff --git a/tests/providers/amazon/aws/infrastructure/example_s3_to_redshift/resources.tf b/providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/resources.tf similarity index 100% rename from tests/providers/amazon/aws/infrastructure/example_s3_to_redshift/resources.tf rename to providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/resources.tf diff --git a/tests/providers/amazon/aws/infrastructure/example_s3_to_redshift/variables.tf b/providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/variables.tf similarity index 100% rename from tests/providers/amazon/aws/infrastructure/example_s3_to_redshift/variables.tf rename to providers/tests/amazon/aws/infrastructure/example_s3_to_redshift/variables.tf diff --git a/tests/integration/providers/redis/operators/__init__.py b/providers/tests/amazon/aws/links/__init__.py similarity index 100% rename from tests/integration/providers/redis/operators/__init__.py rename to providers/tests/amazon/aws/links/__init__.py diff --git a/tests/providers/amazon/aws/links/test_athena.py b/providers/tests/amazon/aws/links/test_athena.py similarity index 95% rename from tests/providers/amazon/aws/links/test_athena.py rename to providers/tests/amazon/aws/links/test_athena.py index 
1729fdf4e5c23..2da3f3fa441d1 100644 --- a/tests/providers/amazon/aws/links/test_athena.py +++ b/providers/tests/amazon/aws/links/test_athena.py @@ -17,7 +17,8 @@ from __future__ import annotations from airflow.providers.amazon.aws.links.athena import AthenaQueryResultsLink -from tests.providers.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase + +from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase class TestAthenaQueryResultsLink(BaseAwsLinksTestCase): diff --git a/tests/providers/amazon/aws/links/test_base_aws.py b/providers/tests/amazon/aws/links/test_base_aws.py similarity index 98% rename from tests/providers/amazon/aws/links/test_base_aws.py rename to providers/tests/amazon/aws/links/test_base_aws.py index 1afcfea0a826f..78622870806fb 100644 --- a/tests/providers/amazon/aws/links/test_base_aws.py +++ b/providers/tests/amazon/aws/links/test_base_aws.py @@ -25,8 +25,9 @@ from airflow.models.xcom import XCom from airflow.providers.amazon.aws.links.base_aws import BaseAwsLink from airflow.serialization.serialized_objects import SerializedDAG -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.mock_operators import MockOperator + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.mock_operators import MockOperator if TYPE_CHECKING: from airflow.models.taskinstance import TaskInstance diff --git a/tests/providers/amazon/aws/links/test_batch.py b/providers/tests/amazon/aws/links/test_batch.py similarity index 97% rename from tests/providers/amazon/aws/links/test_batch.py rename to providers/tests/amazon/aws/links/test_batch.py index 2c8c9b59cb830..eafe49260c9b4 100644 --- a/tests/providers/amazon/aws/links/test_batch.py +++ b/providers/tests/amazon/aws/links/test_batch.py @@ -21,7 +21,8 @@ BatchJobDetailsLink, BatchJobQueueLink, ) -from tests.providers.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase + +from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase class TestBatchJobDefinitionLink(BaseAwsLinksTestCase): diff --git a/tests/providers/amazon/aws/links/test_emr.py b/providers/tests/amazon/aws/links/test_emr.py similarity index 99% rename from tests/providers/amazon/aws/links/test_emr.py rename to providers/tests/amazon/aws/links/test_emr.py index 50db654d4914a..7510b2a2f50d3 100644 --- a/tests/providers/amazon/aws/links/test_emr.py +++ b/providers/tests/amazon/aws/links/test_emr.py @@ -32,7 +32,8 @@ get_log_uri, get_serverless_dashboard_url, ) -from tests.providers.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase + +from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase class TestEmrClusterLink(BaseAwsLinksTestCase): diff --git a/tests/providers/amazon/aws/links/test_glue.py b/providers/tests/amazon/aws/links/test_glue.py similarity index 95% rename from tests/providers/amazon/aws/links/test_glue.py rename to providers/tests/amazon/aws/links/test_glue.py index 5f929cd3e950a..772ac5ee9c773 100644 --- a/tests/providers/amazon/aws/links/test_glue.py +++ b/providers/tests/amazon/aws/links/test_glue.py @@ -17,7 +17,8 @@ from __future__ import annotations from airflow.providers.amazon.aws.links.glue import GlueJobRunDetailsLink -from tests.providers.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase + +from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase class TestGlueJobRunDetailsLink(BaseAwsLinksTestCase): diff --git a/tests/providers/amazon/aws/links/test_logs.py 
b/providers/tests/amazon/aws/links/test_logs.py similarity index 96% rename from tests/providers/amazon/aws/links/test_logs.py rename to providers/tests/amazon/aws/links/test_logs.py index 991a8bc6f0c19..b596e7517eafb 100644 --- a/tests/providers/amazon/aws/links/test_logs.py +++ b/providers/tests/amazon/aws/links/test_logs.py @@ -17,7 +17,8 @@ from __future__ import annotations from airflow.providers.amazon.aws.links.logs import CloudWatchEventsLink -from tests.providers.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase + +from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase class TestCloudWatchEventsLink(BaseAwsLinksTestCase): diff --git a/tests/providers/amazon/aws/links/test_step_function.py b/providers/tests/amazon/aws/links/test_step_function.py similarity index 97% rename from tests/providers/amazon/aws/links/test_step_function.py rename to providers/tests/amazon/aws/links/test_step_function.py index 3c6c9cc7cdc27..fab1205689dc9 100644 --- a/tests/providers/amazon/aws/links/test_step_function.py +++ b/providers/tests/amazon/aws/links/test_step_function.py @@ -22,7 +22,8 @@ StateMachineDetailsLink, StateMachineExecutionsDetailsLink, ) -from tests.providers.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase + +from providers.tests.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase class TestStateMachineDetailsLink(BaseAwsLinksTestCase): diff --git a/tests/integration/providers/redis/sensors/__init__.py b/providers/tests/amazon/aws/log/__init__.py similarity index 100% rename from tests/integration/providers/redis/sensors/__init__.py rename to providers/tests/amazon/aws/log/__init__.py diff --git a/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py b/providers/tests/amazon/aws/log/test_cloudwatch_task_handler.py similarity index 99% rename from tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py rename to providers/tests/amazon/aws/log/test_cloudwatch_task_handler.py index 801935a5496af..c78fab89e197a 100644 --- a/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py +++ b/providers/tests/amazon/aws/log/test_cloudwatch_task_handler.py @@ -35,7 +35,8 @@ from airflow.providers.amazon.aws.utils import datetime_to_epoch_utc_ms from airflow.utils.state import State from airflow.utils.timezone import datetime -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars def get_time_str(time_in_milliseconds): diff --git a/tests/providers/amazon/aws/log/test_s3_task_handler.py b/providers/tests/amazon/aws/log/test_s3_task_handler.py similarity index 99% rename from tests/providers/amazon/aws/log/test_s3_task_handler.py rename to providers/tests/amazon/aws/log/test_s3_task_handler.py index fcb5d3c7833b4..9819cf95e9cc1 100644 --- a/tests/providers/amazon/aws/log/test_s3_task_handler.py +++ b/providers/tests/amazon/aws/log/test_s3_task_handler.py @@ -33,7 +33,8 @@ from airflow.providers.amazon.aws.log.s3_task_handler import S3TaskHandler from airflow.utils.state import State, TaskInstanceState from airflow.utils.timezone import datetime -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars @pytest.fixture(autouse=True) diff --git a/tests/integration/providers/trino/__init__.py b/providers/tests/amazon/aws/notifications/__init__.py similarity index 100% rename from tests/integration/providers/trino/__init__.py rename to providers/tests/amazon/aws/notifications/__init__.py diff --git 
a/tests/providers/amazon/aws/notifications/test_chime.py b/providers/tests/amazon/aws/notifications/test_chime.py similarity index 100% rename from tests/providers/amazon/aws/notifications/test_chime.py rename to providers/tests/amazon/aws/notifications/test_chime.py diff --git a/tests/providers/amazon/aws/notifications/test_sns.py b/providers/tests/amazon/aws/notifications/test_sns.py similarity index 100% rename from tests/providers/amazon/aws/notifications/test_sns.py rename to providers/tests/amazon/aws/notifications/test_sns.py diff --git a/tests/providers/amazon/aws/notifications/test_sqs.py b/providers/tests/amazon/aws/notifications/test_sqs.py similarity index 100% rename from tests/providers/amazon/aws/notifications/test_sqs.py rename to providers/tests/amazon/aws/notifications/test_sqs.py diff --git a/tests/integration/providers/trino/hooks/__init__.py b/providers/tests/amazon/aws/operators/__init__.py similarity index 100% rename from tests/integration/providers/trino/hooks/__init__.py rename to providers/tests/amazon/aws/operators/__init__.py diff --git a/tests/providers/amazon/aws/operators/athena_metadata.json b/providers/tests/amazon/aws/operators/athena_metadata.json similarity index 100% rename from tests/providers/amazon/aws/operators/athena_metadata.json rename to providers/tests/amazon/aws/operators/athena_metadata.json diff --git a/tests/providers/amazon/aws/operators/test_appflow.py b/providers/tests/amazon/aws/operators/test_appflow.py similarity index 100% rename from tests/providers/amazon/aws/operators/test_appflow.py rename to providers/tests/amazon/aws/operators/test_appflow.py diff --git a/tests/providers/amazon/aws/operators/test_athena.py b/providers/tests/amazon/aws/operators/test_athena.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_athena.py rename to providers/tests/amazon/aws/operators/test_athena.py index 102d1fe31e5c1..e4d7d6622a091 100644 --- a/tests/providers/amazon/aws/operators/test_athena.py +++ b/providers/tests/amazon/aws/operators/test_athena.py @@ -17,6 +17,7 @@ from __future__ import annotations import json +import os from unittest import mock import pytest @@ -39,7 +40,8 @@ from airflow.utils import timezone from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields TEST_DAG_ID = "unit_tests" DEFAULT_DATE = datetime(2018, 1, 1) @@ -299,7 +301,7 @@ def test_operator_openlineage_data(self, mock_conn, mock_region_name): mock_region_name.return_value = "eu-west-1" def mock_get_table_metadata(CatalogName, DatabaseName, TableName): - with open("tests/providers/amazon/aws/operators/athena_metadata.json") as f: + with open(os.path.dirname(__file__) + "/athena_metadata.json") as f: return json.load(f)[TableName] mock_conn.return_value.get_table_metadata = mock_get_table_metadata diff --git a/tests/providers/amazon/aws/operators/test_base_aws.py b/providers/tests/amazon/aws/operators/test_base_aws.py similarity index 100% rename from tests/providers/amazon/aws/operators/test_base_aws.py rename to providers/tests/amazon/aws/operators/test_base_aws.py diff --git a/tests/providers/amazon/aws/operators/test_batch.py b/providers/tests/amazon/aws/operators/test_batch.py similarity index 100% rename from tests/providers/amazon/aws/operators/test_batch.py rename to providers/tests/amazon/aws/operators/test_batch.py 
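Nearly every operator test in this series imports `validate_template_fields` from the relocated `providers.tests.amazon.aws.utils.test_template_fields` helper. Its job, roughly, is to guarantee that Jinja templating cannot break at render time. An illustrative sketch of that contract (the real helper may perform additional checks):

```python
# Illustrative sketch only: every name listed in an operator's
# template_fields must exist as a real attribute of the instance,
# otherwise Airflow's template rendering would fail at runtime.
def validate_template_fields(operator) -> None:
    missing = [
        field for field in operator.template_fields if not hasattr(operator, field)
    ]
    assert not missing, f"template_fields not set as attributes: {missing}"


class FakeOperator:  # hypothetical stand-in for an AWS operator
    template_fields = ("job_name", "job_queue")

    def __init__(self, job_name: str, job_queue: str) -> None:
        self.job_name = job_name
        self.job_queue = job_queue


validate_template_fields(FakeOperator("demo-job", "demo-queue"))
```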
diff --git a/tests/providers/amazon/aws/operators/test_bedrock.py b/providers/tests/amazon/aws/operators/test_bedrock.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_bedrock.py rename to providers/tests/amazon/aws/operators/test_bedrock.py index 8cbb67d6f50df..5e8cd32e1a4d4 100644 --- a/tests/providers/amazon/aws/operators/test_bedrock.py +++ b/providers/tests/amazon/aws/operators/test_bedrock.py @@ -35,7 +35,8 @@ BedrockInvokeModelOperator, BedrockRaGOperator, ) -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields if TYPE_CHECKING: from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection diff --git a/tests/providers/amazon/aws/operators/test_cloud_formation.py b/providers/tests/amazon/aws/operators/test_cloud_formation.py similarity index 98% rename from tests/providers/amazon/aws/operators/test_cloud_formation.py rename to providers/tests/amazon/aws/operators/test_cloud_formation.py index 4d8fb4d12bd3c..47b1659b426f0 100644 --- a/tests/providers/amazon/aws/operators/test_cloud_formation.py +++ b/providers/tests/amazon/aws/operators/test_cloud_formation.py @@ -28,7 +28,8 @@ CloudFormationDeleteStackOperator, ) from airflow.utils import timezone -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields DEFAULT_DATE = timezone.datetime(2019, 1, 1) DEFAULT_ARGS = {"owner": "airflow", "start_date": DEFAULT_DATE} diff --git a/tests/providers/amazon/aws/operators/test_comprehend.py b/providers/tests/amazon/aws/operators/test_comprehend.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_comprehend.py rename to providers/tests/amazon/aws/operators/test_comprehend.py index a86b779b1d502..170f7b6760823 100644 --- a/tests/providers/amazon/aws/operators/test_comprehend.py +++ b/providers/tests/amazon/aws/operators/test_comprehend.py @@ -29,7 +29,8 @@ ComprehendStartPiiEntitiesDetectionJobOperator, ) from airflow.utils.types import NOTSET -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields if TYPE_CHECKING: from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection diff --git a/tests/providers/amazon/aws/operators/test_datasync.py b/providers/tests/amazon/aws/operators/test_datasync.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_datasync.py rename to providers/tests/amazon/aws/operators/test_datasync.py index 18b0e86103c0b..52c5863b8aa75 100644 --- a/tests/providers/amazon/aws/operators/test_datasync.py +++ b/providers/tests/amazon/aws/operators/test_datasync.py @@ -29,7 +29,8 @@ from airflow.utils import timezone from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields TEST_DAG_ID = "unit_tests" DEFAULT_DATE = datetime(2018, 1, 1) diff --git a/tests/providers/amazon/aws/operators/test_dms.py b/providers/tests/amazon/aws/operators/test_dms.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_dms.py rename to providers/tests/amazon/aws/operators/test_dms.py 
index 2528edaef9e0a..0c99dc2cd9334 100644 --- a/tests/providers/amazon/aws/operators/test_dms.py +++ b/providers/tests/amazon/aws/operators/test_dms.py @@ -34,7 +34,8 @@ ) from airflow.utils import timezone from airflow.utils.types import DagRunType -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields TASK_ARN = "test_arn" diff --git a/tests/providers/amazon/aws/operators/test_ec2.py b/providers/tests/amazon/aws/operators/test_ec2.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_ec2.py rename to providers/tests/amazon/aws/operators/test_ec2.py index a5ea81ff6ae87..b4b576c6567f6 100644 --- a/tests/providers/amazon/aws/operators/test_ec2.py +++ b/providers/tests/amazon/aws/operators/test_ec2.py @@ -30,7 +30,8 @@ EC2StopInstanceOperator, EC2TerminateInstanceOperator, ) -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields class BaseEc2TestClass: diff --git a/tests/providers/amazon/aws/operators/test_ecs.py b/providers/tests/amazon/aws/operators/test_ecs.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_ecs.py rename to providers/tests/amazon/aws/operators/test_ecs.py index d7b6c0d4e8716..ed900acb73644 100644 --- a/tests/providers/amazon/aws/operators/test_ecs.py +++ b/providers/tests/amazon/aws/operators/test_ecs.py @@ -38,7 +38,8 @@ from airflow.providers.amazon.aws.triggers.ecs import TaskDoneTrigger from airflow.providers.amazon.aws.utils.task_log_fetcher import AwsTaskLogFetcher from airflow.utils.types import NOTSET -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields CLUSTER_NAME = "test_cluster" CONTAINER_NAME = "e1ed7aac-d9b2-4315-8726-d2432bf11868" diff --git a/tests/providers/amazon/aws/operators/test_eks.py b/providers/tests/amazon/aws/operators/test_eks.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_eks.py rename to providers/tests/amazon/aws/operators/test_eks.py index 2daa484626879..8105614849a3f 100644 --- a/tests/providers/amazon/aws/operators/test_eks.py +++ b/providers/tests/amazon/aws/operators/test_eks.py @@ -41,7 +41,8 @@ ) from airflow.providers.cncf.kubernetes.utils.pod_manager import OnFinishAction from airflow.typing_compat import TypedDict -from tests.providers.amazon.aws.utils.eks_test_constants import ( + +from providers.tests.amazon.aws.utils.eks_test_constants import ( NODEROLE_ARN, POD_EXECUTION_ROLE_ARN, RESOURCES_VPC_CONFIG, @@ -50,9 +51,9 @@ SUBNET_IDS, TASK_ID, ) -from tests.providers.amazon.aws.utils.eks_test_utils import convert_keys -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields -from tests.providers.amazon.aws.utils.test_waiter import assert_expected_waiter_type +from providers.tests.amazon.aws.utils.eks_test_utils import convert_keys +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type CLUSTER_NAME = "cluster1" NODEGROUP_NAME = "nodegroup1" diff --git a/tests/providers/amazon/aws/operators/test_emr_add_steps.py b/providers/tests/amazon/aws/operators/test_emr_add_steps.py similarity index 97% rename from 
tests/providers/amazon/aws/operators/test_emr_add_steps.py rename to providers/tests/amazon/aws/operators/test_emr_add_steps.py index d5a999349aa53..4414ae4327070 100644 --- a/tests/providers/amazon/aws/operators/test_emr_add_steps.py +++ b/providers/tests/amazon/aws/operators/test_emr_add_steps.py @@ -18,8 +18,8 @@ from __future__ import annotations import json -import os from datetime import timedelta +from pathlib import Path from unittest.mock import MagicMock, call, patch import pytest @@ -31,16 +31,14 @@ from airflow.providers.amazon.aws.triggers.emr import EmrAddStepsTrigger from airflow.utils import timezone from airflow.utils.types import DagRunType -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields -from tests.test_utils import AIRFLOW_MAIN_FOLDER + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields DEFAULT_DATE = timezone.datetime(2017, 1, 1) ADD_STEPS_SUCCESS_RETURN = {"ResponseMetadata": {"HTTPStatusCode": 200}, "StepIds": ["s-2LH3R5GW3A53T"]} -TEMPLATE_SEARCHPATH = os.path.join( - AIRFLOW_MAIN_FOLDER, "tests", "providers", "amazon", "aws", "config_templates" -) +TEMPLATE_SEARCHPATH = Path(__file__).parents[1].joinpath("config_templates").as_posix() @pytest.fixture diff --git a/tests/providers/amazon/aws/operators/test_emr_containers.py b/providers/tests/amazon/aws/operators/test_emr_containers.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_emr_containers.py rename to providers/tests/amazon/aws/operators/test_emr_containers.py index 52306864f3597..b31364bb02f50 100644 --- a/tests/providers/amazon/aws/operators/test_emr_containers.py +++ b/providers/tests/amazon/aws/operators/test_emr_containers.py @@ -25,7 +25,8 @@ from airflow.providers.amazon.aws.hooks.emr import EmrContainerHook from airflow.providers.amazon.aws.operators.emr import EmrContainerOperator, EmrEksCreateClusterOperator from airflow.providers.amazon.aws.triggers.emr import EmrContainerTrigger -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields SUBMIT_JOB_SUCCESS_RETURN = { "ResponseMetadata": {"HTTPStatusCode": 200}, diff --git a/tests/providers/amazon/aws/operators/test_emr_create_job_flow.py b/providers/tests/amazon/aws/operators/test_emr_create_job_flow.py similarity index 95% rename from tests/providers/amazon/aws/operators/test_emr_create_job_flow.py rename to providers/tests/amazon/aws/operators/test_emr_create_job_flow.py index 860df8c7219ac..b2f7c8eb48f7b 100644 --- a/tests/providers/amazon/aws/operators/test_emr_create_job_flow.py +++ b/providers/tests/amazon/aws/operators/test_emr_create_job_flow.py @@ -17,8 +17,8 @@ # under the License. 
from __future__ import annotations -import os from datetime import timedelta +from pathlib import Path from unittest import mock from unittest.mock import MagicMock, patch @@ -32,9 +32,9 @@ from airflow.providers.amazon.aws.triggers.emr import EmrCreateJobFlowTrigger from airflow.utils import timezone from airflow.utils.types import DagRunType -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields -from tests.providers.amazon.aws.utils.test_waiter import assert_expected_waiter_type -from tests.test_utils import AIRFLOW_MAIN_FOLDER + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type TASK_ID = "test_task" @@ -45,9 +45,7 @@ JOB_FLOW_ID = "j-8989898989" RUN_JOB_FLOW_SUCCESS_RETURN = {"ResponseMetadata": {"HTTPStatusCode": 200}, "JobFlowId": JOB_FLOW_ID} -TEMPLATE_SEARCHPATH = os.path.join( - AIRFLOW_MAIN_FOLDER, "tests", "providers", "amazon", "aws", "config_templates" -) +TEMPLATE_SEARCHPATH = Path(__file__).parents[1].joinpath("config_templates").as_posix() @pytest.fixture diff --git a/tests/providers/amazon/aws/operators/test_emr_modify_cluster.py b/providers/tests/amazon/aws/operators/test_emr_modify_cluster.py similarity index 97% rename from tests/providers/amazon/aws/operators/test_emr_modify_cluster.py rename to providers/tests/amazon/aws/operators/test_emr_modify_cluster.py index 6f257288760c3..4c7aae9b4c272 100644 --- a/tests/providers/amazon/aws/operators/test_emr_modify_cluster.py +++ b/providers/tests/amazon/aws/operators/test_emr_modify_cluster.py @@ -25,7 +25,8 @@ from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.emr import EmrModifyClusterOperator from airflow.utils import timezone -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields DEFAULT_DATE = timezone.datetime(2017, 1, 1) MODIFY_CLUSTER_SUCCESS_RETURN = {"ResponseMetadata": {"HTTPStatusCode": 200}, "StepConcurrencyLevel": 1} diff --git a/tests/providers/amazon/aws/operators/test_emr_notebook_execution.py b/providers/tests/amazon/aws/operators/test_emr_notebook_execution.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_emr_notebook_execution.py rename to providers/tests/amazon/aws/operators/test_emr_notebook_execution.py index 6fcd4eeb74629..42ce47ea6b699 100644 --- a/tests/providers/amazon/aws/operators/test_emr_notebook_execution.py +++ b/providers/tests/amazon/aws/operators/test_emr_notebook_execution.py @@ -28,8 +28,9 @@ EmrStartNotebookExecutionOperator, EmrStopNotebookExecutionOperator, ) -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields -from tests.providers.amazon.aws.utils.test_waiter import assert_expected_waiter_type + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields +from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type PARAMS = { "EditorId": "test_editor", diff --git a/tests/providers/amazon/aws/operators/test_emr_serverless.py b/providers/tests/amazon/aws/operators/test_emr_serverless.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_emr_serverless.py rename to providers/tests/amazon/aws/operators/test_emr_serverless.py index c84d1032bcf71..bde6e9895f5c9 100644 --- 
a/tests/providers/amazon/aws/operators/test_emr_serverless.py +++ b/providers/tests/amazon/aws/operators/test_emr_serverless.py @@ -32,7 +32,8 @@ EmrServerlessStopApplicationOperator, ) from airflow.utils.types import NOTSET -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields if TYPE_CHECKING: from unittest.mock import MagicMock diff --git a/tests/providers/amazon/aws/operators/test_emr_terminate_job_flow.py b/providers/tests/amazon/aws/operators/test_emr_terminate_job_flow.py similarity index 97% rename from tests/providers/amazon/aws/operators/test_emr_terminate_job_flow.py rename to providers/tests/amazon/aws/operators/test_emr_terminate_job_flow.py index 06ab35e4510ba..6ce3fb29cd6b2 100644 --- a/tests/providers/amazon/aws/operators/test_emr_terminate_job_flow.py +++ b/providers/tests/amazon/aws/operators/test_emr_terminate_job_flow.py @@ -24,7 +24,8 @@ from airflow.exceptions import TaskDeferred from airflow.providers.amazon.aws.operators.emr import EmrTerminateJobFlowOperator from airflow.providers.amazon.aws.triggers.emr import EmrTerminateJobFlowTrigger -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields TERMINATE_SUCCESS_RETURN = {"ResponseMetadata": {"HTTPStatusCode": 200}} diff --git a/tests/providers/amazon/aws/operators/test_eventbridge.py b/providers/tests/amazon/aws/operators/test_eventbridge.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_eventbridge.py rename to providers/tests/amazon/aws/operators/test_eventbridge.py index 3c682f1477e03..129edf9ff6247 100644 --- a/tests/providers/amazon/aws/operators/test_eventbridge.py +++ b/providers/tests/amazon/aws/operators/test_eventbridge.py @@ -29,7 +29,8 @@ EventBridgePutEventsOperator, EventBridgePutRuleOperator, ) -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields if TYPE_CHECKING: from unittest.mock import MagicMock diff --git a/tests/providers/amazon/aws/operators/test_glacier.py b/providers/tests/amazon/aws/operators/test_glacier.py similarity index 98% rename from tests/providers/amazon/aws/operators/test_glacier.py rename to providers/tests/amazon/aws/operators/test_glacier.py index f46b0bc929fe8..ff4d220076f89 100644 --- a/tests/providers/amazon/aws/operators/test_glacier.py +++ b/providers/tests/amazon/aws/operators/test_glacier.py @@ -26,7 +26,8 @@ GlacierCreateJobOperator, GlacierUploadArchiveOperator, ) -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields if TYPE_CHECKING: from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator diff --git a/tests/providers/amazon/aws/operators/test_glue.py b/providers/tests/amazon/aws/operators/test_glue.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_glue.py rename to providers/tests/amazon/aws/operators/test_glue.py index e1adcee7d639f..8243940e695fd 100644 --- a/tests/providers/amazon/aws/operators/test_glue.py +++ b/providers/tests/amazon/aws/operators/test_glue.py @@ -34,7 +34,8 @@ GlueDataQualityRuleSetEvaluationRunOperator, GlueJobOperator, ) -from 
tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields if TYPE_CHECKING: from airflow.models import TaskInstance diff --git a/tests/providers/amazon/aws/operators/test_glue_crawler.py b/providers/tests/amazon/aws/operators/test_glue_crawler.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_glue_crawler.py rename to providers/tests/amazon/aws/operators/test_glue_crawler.py index 1e5a3f2177b5b..1df63a4e2ad05 100644 --- a/tests/providers/amazon/aws/operators/test_glue_crawler.py +++ b/providers/tests/amazon/aws/operators/test_glue_crawler.py @@ -25,7 +25,8 @@ from airflow.providers.amazon.aws.hooks.glue_crawler import GlueCrawlerHook from airflow.providers.amazon.aws.hooks.sts import StsHook from airflow.providers.amazon.aws.operators.glue_crawler import GlueCrawlerOperator -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields if TYPE_CHECKING: from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection diff --git a/tests/providers/amazon/aws/operators/test_glue_databrew.py b/providers/tests/amazon/aws/operators/test_glue_databrew.py similarity index 98% rename from tests/providers/amazon/aws/operators/test_glue_databrew.py rename to providers/tests/amazon/aws/operators/test_glue_databrew.py index 698b206acfb1c..1b46549df4545 100644 --- a/tests/providers/amazon/aws/operators/test_glue_databrew.py +++ b/providers/tests/amazon/aws/operators/test_glue_databrew.py @@ -25,7 +25,8 @@ from airflow.providers.amazon.aws.hooks.glue_databrew import GlueDataBrewHook from airflow.providers.amazon.aws.operators.glue_databrew import GlueDataBrewStartJobOperator -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields JOB_NAME = "test_job" diff --git a/tests/providers/amazon/aws/operators/test_kinesis_analytics.py b/providers/tests/amazon/aws/operators/test_kinesis_analytics.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_kinesis_analytics.py rename to providers/tests/amazon/aws/operators/test_kinesis_analytics.py index ab8bb3123008d..b6b92e4c0b134 100644 --- a/tests/providers/amazon/aws/operators/test_kinesis_analytics.py +++ b/providers/tests/amazon/aws/operators/test_kinesis_analytics.py @@ -30,7 +30,8 @@ KinesisAnalyticsV2StartApplicationOperator, KinesisAnalyticsV2StopApplicationOperator, ) -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields if TYPE_CHECKING: from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection diff --git a/tests/providers/amazon/aws/operators/test_lambda_function.py b/providers/tests/amazon/aws/operators/test_lambda_function.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_lambda_function.py rename to providers/tests/amazon/aws/operators/test_lambda_function.py index e3a5b8cad6201..7ec081969103a 100644 --- a/tests/providers/amazon/aws/operators/test_lambda_function.py +++ b/providers/tests/amazon/aws/operators/test_lambda_function.py @@ -29,7 +29,8 @@ LambdaCreateFunctionOperator, LambdaInvokeFunctionOperator, ) -from 
tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields FUNCTION_NAME = "function_name" PAYLOADS = [ diff --git a/tests/providers/amazon/aws/operators/test_neptune.py b/providers/tests/amazon/aws/operators/test_neptune.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_neptune.py rename to providers/tests/amazon/aws/operators/test_neptune.py index 146effaaa20f7..0606f06e053a1 100644 --- a/tests/providers/amazon/aws/operators/test_neptune.py +++ b/providers/tests/amazon/aws/operators/test_neptune.py @@ -30,7 +30,8 @@ NeptuneStartDbClusterOperator, NeptuneStopDbClusterOperator, ) -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields CLUSTER_ID = "test_cluster" diff --git a/tests/providers/amazon/aws/operators/test_quicksight.py b/providers/tests/amazon/aws/operators/test_quicksight.py similarity index 98% rename from tests/providers/amazon/aws/operators/test_quicksight.py rename to providers/tests/amazon/aws/operators/test_quicksight.py index f2d23c7b81793..063c5fc44a464 100644 --- a/tests/providers/amazon/aws/operators/test_quicksight.py +++ b/providers/tests/amazon/aws/operators/test_quicksight.py @@ -21,7 +21,8 @@ from airflow.providers.amazon.aws.hooks.quicksight import QuickSightHook from airflow.providers.amazon.aws.operators.quicksight import QuickSightCreateIngestionOperator -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields DATA_SET_ID = "DemoDataSet" INGESTION_ID = "DemoDataSet_Ingestion" diff --git a/tests/providers/amazon/aws/operators/test_rds.py b/providers/tests/amazon/aws/operators/test_rds.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_rds.py rename to providers/tests/amazon/aws/operators/test_rds.py index 0574d4b553b48..3d0fbcb84096d 100644 --- a/tests/providers/amazon/aws/operators/test_rds.py +++ b/providers/tests/amazon/aws/operators/test_rds.py @@ -44,7 +44,8 @@ ) from airflow.providers.amazon.aws.triggers.rds import RdsDbAvailableTrigger, RdsDbStoppedTrigger from airflow.utils import timezone -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields if TYPE_CHECKING: from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook diff --git a/tests/providers/amazon/aws/operators/test_redshift_cluster.py b/providers/tests/amazon/aws/operators/test_redshift_cluster.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_redshift_cluster.py rename to providers/tests/amazon/aws/operators/test_redshift_cluster.py index e48f7b2ed96ea..e67bff3390c08 100644 --- a/tests/providers/amazon/aws/operators/test_redshift_cluster.py +++ b/providers/tests/amazon/aws/operators/test_redshift_cluster.py @@ -38,7 +38,8 @@ RedshiftPauseClusterTrigger, RedshiftResumeClusterTrigger, ) -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields class TestRedshiftCreateClusterOperator: diff --git a/tests/providers/amazon/aws/operators/test_redshift_data.py 
b/providers/tests/amazon/aws/operators/test_redshift_data.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_redshift_data.py rename to providers/tests/amazon/aws/operators/test_redshift_data.py index c4972e4c42e7e..d367fc3ca9596 100644 --- a/tests/providers/amazon/aws/operators/test_redshift_data.py +++ b/providers/tests/amazon/aws/operators/test_redshift_data.py @@ -25,7 +25,8 @@ from airflow.providers.amazon.aws.hooks.redshift_data import QueryExecutionOutput from airflow.providers.amazon.aws.operators.redshift_data import RedshiftDataOperator from airflow.providers.amazon.aws.triggers.redshift_data import RedshiftDataTrigger -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields CONN_ID = "aws_conn_test" TASK_ID = "task_id" diff --git a/tests/providers/amazon/aws/operators/test_redshift_sql.py b/providers/tests/amazon/aws/operators/test_redshift_sql.py similarity index 100% rename from tests/providers/amazon/aws/operators/test_redshift_sql.py rename to providers/tests/amazon/aws/operators/test_redshift_sql.py diff --git a/tests/providers/amazon/aws/operators/test_s3.py b/providers/tests/amazon/aws/operators/test_s3.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_s3.py rename to providers/tests/amazon/aws/operators/test_s3.py index 937baefde59ea..8c6dbd2df50fc 100644 --- a/tests/providers/amazon/aws/operators/test_s3.py +++ b/providers/tests/amazon/aws/operators/test_s3.py @@ -57,7 +57,8 @@ from airflow.providers.openlineage.extractors import OperatorLineage from airflow.utils.timezone import datetime, utcnow from airflow.utils.types import DagRunType -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields BUCKET_NAME = os.environ.get("BUCKET_NAME", "test-airflow-bucket") S3_KEY = "test-airflow-key" diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_base.py b/providers/tests/amazon/aws/operators/test_sagemaker_base.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_sagemaker_base.py rename to providers/tests/amazon/aws/operators/test_sagemaker_base.py index 5de40708d5158..e6e1fcd5bd90c 100644 --- a/tests/providers/amazon/aws/operators/test_sagemaker_base.py +++ b/providers/tests/amazon/aws/operators/test_sagemaker_base.py @@ -32,7 +32,8 @@ ) from airflow.utils import timezone from airflow.utils.types import DagRunType -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields CONFIG: dict = { "key1": "1", diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_endpoint.py b/providers/tests/amazon/aws/operators/test_sagemaker_endpoint.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_sagemaker_endpoint.py rename to providers/tests/amazon/aws/operators/test_sagemaker_endpoint.py index 24cf944f8db6a..c24cf39e79c3a 100644 --- a/tests/providers/amazon/aws/operators/test_sagemaker_endpoint.py +++ b/providers/tests/amazon/aws/operators/test_sagemaker_endpoint.py @@ -27,7 +27,8 @@ from airflow.providers.amazon.aws.operators import sagemaker from airflow.providers.amazon.aws.operators.sagemaker import SageMakerEndpointOperator from airflow.providers.amazon.aws.triggers.sagemaker 
import SageMakerTrigger -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields CREATE_MODEL_PARAMS: dict = { "ModelName": "model_name", diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_endpoint_config.py b/providers/tests/amazon/aws/operators/test_sagemaker_endpoint_config.py similarity index 98% rename from tests/providers/amazon/aws/operators/test_sagemaker_endpoint_config.py rename to providers/tests/amazon/aws/operators/test_sagemaker_endpoint_config.py index 1169f09d9141d..c62721fcf3fe9 100644 --- a/tests/providers/amazon/aws/operators/test_sagemaker_endpoint_config.py +++ b/providers/tests/amazon/aws/operators/test_sagemaker_endpoint_config.py @@ -25,7 +25,8 @@ from airflow.providers.amazon.aws.hooks.sagemaker import SageMakerHook from airflow.providers.amazon.aws.operators import sagemaker from airflow.providers.amazon.aws.operators.sagemaker import SageMakerEndpointConfigOperator -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields CREATE_ENDPOINT_CONFIG_PARAMS: dict = { "EndpointConfigName": "config_name", diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_model.py b/providers/tests/amazon/aws/operators/test_sagemaker_model.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_sagemaker_model.py rename to providers/tests/amazon/aws/operators/test_sagemaker_model.py index 33d1f5b4d1f6f..095c6990ce51a 100644 --- a/tests/providers/amazon/aws/operators/test_sagemaker_model.py +++ b/providers/tests/amazon/aws/operators/test_sagemaker_model.py @@ -31,7 +31,8 @@ SageMakerModelOperator, SageMakerRegisterModelVersionOperator, ) -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields CREATE_MODEL_PARAMS: dict = { "ModelName": "model_name", diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_notebook.py b/providers/tests/amazon/aws/operators/test_sagemaker_notebook.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_sagemaker_notebook.py rename to providers/tests/amazon/aws/operators/test_sagemaker_notebook.py index 093e264a3e454..0593b0f798429 100644 --- a/tests/providers/amazon/aws/operators/test_sagemaker_notebook.py +++ b/providers/tests/amazon/aws/operators/test_sagemaker_notebook.py @@ -30,7 +30,8 @@ SageMakerStartNoteBookOperator, SageMakerStopNotebookOperator, ) -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields INSTANCE_NAME = "notebook" INSTANCE_TYPE = "ml.t3.medium" diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_pipeline.py b/providers/tests/amazon/aws/operators/test_sagemaker_pipeline.py similarity index 98% rename from tests/providers/amazon/aws/operators/test_sagemaker_pipeline.py rename to providers/tests/amazon/aws/operators/test_sagemaker_pipeline.py index e7334de98df03..0a2d4d00ad85d 100644 --- a/tests/providers/amazon/aws/operators/test_sagemaker_pipeline.py +++ b/providers/tests/amazon/aws/operators/test_sagemaker_pipeline.py @@ -29,7 +29,8 @@ SageMakerStopPipelineOperator, ) from airflow.providers.amazon.aws.triggers.sagemaker import 
SageMakerPipelineTrigger -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields if TYPE_CHECKING: from unittest.mock import MagicMock diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_processing.py b/providers/tests/amazon/aws/operators/test_sagemaker_processing.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_sagemaker_processing.py rename to providers/tests/amazon/aws/operators/test_sagemaker_processing.py index b1ca2b62adbb9..898aa68a65100 100644 --- a/tests/providers/amazon/aws/operators/test_sagemaker_processing.py +++ b/providers/tests/amazon/aws/operators/test_sagemaker_processing.py @@ -31,7 +31,8 @@ from airflow.providers.amazon.aws.triggers.sagemaker import SageMakerTrigger from airflow.providers.common.compat.openlineage.facet import Dataset from airflow.providers.openlineage.extractors import OperatorLineage -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields CREATE_PROCESSING_PARAMS: dict = { "AppSpecification": { diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_training.py b/providers/tests/amazon/aws/operators/test_sagemaker_training.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_sagemaker_training.py rename to providers/tests/amazon/aws/operators/test_sagemaker_training.py index 85c6954ac1ac5..f37a41967b9cc 100644 --- a/tests/providers/amazon/aws/operators/test_sagemaker_training.py +++ b/providers/tests/amazon/aws/operators/test_sagemaker_training.py @@ -31,7 +31,8 @@ ) from airflow.providers.common.compat.openlineage.facet import Dataset from airflow.providers.openlineage.extractors import OperatorLineage -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields EXPECTED_INTEGER_FIELDS: list[list[str]] = [ ["ResourceConfig", "InstanceCount"], diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_transform.py b/providers/tests/amazon/aws/operators/test_sagemaker_transform.py similarity index 99% rename from tests/providers/amazon/aws/operators/test_sagemaker_transform.py rename to providers/tests/amazon/aws/operators/test_sagemaker_transform.py index 7804ddde20400..2452558ec4228 100644 --- a/tests/providers/amazon/aws/operators/test_sagemaker_transform.py +++ b/providers/tests/amazon/aws/operators/test_sagemaker_transform.py @@ -30,7 +30,8 @@ from airflow.providers.amazon.aws.triggers.sagemaker import SageMakerTrigger from airflow.providers.common.compat.openlineage.facet import Dataset from airflow.providers.openlineage.extractors import OperatorLineage -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields EXPECTED_INTEGER_FIELDS: list[list[str]] = [ ["Transform", "TransformResources", "InstanceCount"], diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_tuning.py b/providers/tests/amazon/aws/operators/test_sagemaker_tuning.py similarity index 98% rename from tests/providers/amazon/aws/operators/test_sagemaker_tuning.py rename to providers/tests/amazon/aws/operators/test_sagemaker_tuning.py index 78058c771a28d..9048b19b0ec71 100644 --- 
a/tests/providers/amazon/aws/operators/test_sagemaker_tuning.py +++ b/providers/tests/amazon/aws/operators/test_sagemaker_tuning.py @@ -26,7 +26,8 @@ from airflow.providers.amazon.aws.operators import sagemaker from airflow.providers.amazon.aws.operators.sagemaker import SageMakerTuningOperator from airflow.providers.amazon.aws.triggers.sagemaker import SageMakerTrigger -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields EXPECTED_INTEGER_FIELDS: list[list[str]] = [ ["HyperParameterTuningJobConfig", "ResourceLimits", "MaxNumberOfTrainingJobs"], diff --git a/tests/providers/amazon/aws/operators/test_sns.py b/providers/tests/amazon/aws/operators/test_sns.py similarity index 97% rename from tests/providers/amazon/aws/operators/test_sns.py rename to providers/tests/amazon/aws/operators/test_sns.py index 6c5de06822d3a..d2571f6cc1866 100644 --- a/tests/providers/amazon/aws/operators/test_sns.py +++ b/providers/tests/amazon/aws/operators/test_sns.py @@ -22,7 +22,8 @@ import pytest from airflow.providers.amazon.aws.operators.sns import SnsPublishOperator -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields TASK_ID = "sns_publish_job" AWS_CONN_ID = "custom_aws_conn" diff --git a/tests/providers/amazon/aws/operators/test_sqs.py b/providers/tests/amazon/aws/operators/test_sqs.py similarity index 98% rename from tests/providers/amazon/aws/operators/test_sqs.py rename to providers/tests/amazon/aws/operators/test_sqs.py index 2187262fe7c36..4534a16bff7ca 100644 --- a/tests/providers/amazon/aws/operators/test_sqs.py +++ b/providers/tests/amazon/aws/operators/test_sqs.py @@ -25,7 +25,8 @@ from airflow.providers.amazon.aws.hooks.sqs import SqsHook from airflow.providers.amazon.aws.operators.sqs import SqsPublishOperator -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields REGION_NAME = "eu-west-1" QUEUE_NAME = "test-queue" diff --git a/tests/providers/amazon/aws/operators/test_step_function.py b/providers/tests/amazon/aws/operators/test_step_function.py similarity index 95% rename from tests/providers/amazon/aws/operators/test_step_function.py rename to providers/tests/amazon/aws/operators/test_step_function.py index 29d743996af48..cac611381ee74 100644 --- a/tests/providers/amazon/aws/operators/test_step_function.py +++ b/providers/tests/amazon/aws/operators/test_step_function.py @@ -26,7 +26,8 @@ StepFunctionGetExecutionOutputOperator, StepFunctionStartExecutionOperator, ) -from tests.providers.amazon.aws.utils.test_template_fields import validate_template_fields + +from providers.tests.amazon.aws.utils.test_template_fields import validate_template_fields EXECUTION_ARN = ( "arn:aws:states:us-east-1:123456789012:execution:" @@ -117,11 +118,14 @@ class TestStepFunctionStartExecutionOperator: @pytest.fixture(autouse=True) def setup_test_cases(self): - with mock.patch( - "airflow.providers.amazon.aws.links.step_function.StateMachineExecutionsDetailsLink.persist" - ) as executions_details_link, mock.patch( - "airflow.providers.amazon.aws.links.step_function.StateMachineDetailsLink.persist" - ) as details_link: + with ( + mock.patch( + 
"airflow.providers.amazon.aws.links.step_function.StateMachineExecutionsDetailsLink.persist" + ) as executions_details_link, + mock.patch( + "airflow.providers.amazon.aws.links.step_function.StateMachineDetailsLink.persist" + ) as details_link, + ): self.mocked_executions_details_link = executions_details_link self.mocked_details_link = details_link yield diff --git a/tests/integration/providers/ydb/__init__.py b/providers/tests/amazon/aws/secrets/__init__.py similarity index 100% rename from tests/integration/providers/ydb/__init__.py rename to providers/tests/amazon/aws/secrets/__init__.py diff --git a/tests/providers/amazon/aws/secrets/test_secrets_manager.py b/providers/tests/amazon/aws/secrets/test_secrets_manager.py similarity index 100% rename from tests/providers/amazon/aws/secrets/test_secrets_manager.py rename to providers/tests/amazon/aws/secrets/test_secrets_manager.py diff --git a/tests/providers/amazon/aws/secrets/test_systems_manager.py b/providers/tests/amazon/aws/secrets/test_systems_manager.py similarity index 99% rename from tests/providers/amazon/aws/secrets/test_systems_manager.py rename to providers/tests/amazon/aws/secrets/test_systems_manager.py index d4c9c0f9bc136..c1b35a799d3c9 100644 --- a/tests/providers/amazon/aws/secrets/test_systems_manager.py +++ b/providers/tests/amazon/aws/secrets/test_systems_manager.py @@ -24,7 +24,8 @@ from airflow.configuration import initialize_secrets_backends from airflow.providers.amazon.aws.secrets.systems_manager import SystemsManagerParameterStoreBackend -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars URI_CONNECTION = pytest.param( "postgres://my-login:my-pass@my-host:5432/my-schema?param1=val1¶m2=val2", id="uri-connection" diff --git a/tests/integration/providers/ydb/hooks/__init__.py b/providers/tests/amazon/aws/sensors/__init__.py similarity index 100% rename from tests/integration/providers/ydb/hooks/__init__.py rename to providers/tests/amazon/aws/sensors/__init__.py diff --git a/tests/providers/amazon/aws/sensors/test_athena.py b/providers/tests/amazon/aws/sensors/test_athena.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_athena.py rename to providers/tests/amazon/aws/sensors/test_athena.py diff --git a/tests/providers/amazon/aws/sensors/test_base_aws.py b/providers/tests/amazon/aws/sensors/test_base_aws.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_base_aws.py rename to providers/tests/amazon/aws/sensors/test_base_aws.py diff --git a/tests/providers/amazon/aws/sensors/test_batch.py b/providers/tests/amazon/aws/sensors/test_batch.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_batch.py rename to providers/tests/amazon/aws/sensors/test_batch.py diff --git a/tests/providers/amazon/aws/sensors/test_bedrock.py b/providers/tests/amazon/aws/sensors/test_bedrock.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_bedrock.py rename to providers/tests/amazon/aws/sensors/test_bedrock.py diff --git a/tests/providers/amazon/aws/sensors/test_cloud_formation.py b/providers/tests/amazon/aws/sensors/test_cloud_formation.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_cloud_formation.py rename to providers/tests/amazon/aws/sensors/test_cloud_formation.py diff --git a/tests/providers/amazon/aws/sensors/test_comprehend.py b/providers/tests/amazon/aws/sensors/test_comprehend.py similarity index 100% rename from 
tests/providers/amazon/aws/sensors/test_comprehend.py rename to providers/tests/amazon/aws/sensors/test_comprehend.py diff --git a/tests/providers/amazon/aws/sensors/test_dms.py b/providers/tests/amazon/aws/sensors/test_dms.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_dms.py rename to providers/tests/amazon/aws/sensors/test_dms.py diff --git a/tests/providers/amazon/aws/sensors/test_dynamodb.py b/providers/tests/amazon/aws/sensors/test_dynamodb.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_dynamodb.py rename to providers/tests/amazon/aws/sensors/test_dynamodb.py diff --git a/tests/providers/amazon/aws/sensors/test_ec2.py b/providers/tests/amazon/aws/sensors/test_ec2.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_ec2.py rename to providers/tests/amazon/aws/sensors/test_ec2.py diff --git a/tests/providers/amazon/aws/sensors/test_ecs.py b/providers/tests/amazon/aws/sensors/test_ecs.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_ecs.py rename to providers/tests/amazon/aws/sensors/test_ecs.py diff --git a/tests/providers/amazon/aws/sensors/test_eks.py b/providers/tests/amazon/aws/sensors/test_eks.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_eks.py rename to providers/tests/amazon/aws/sensors/test_eks.py diff --git a/tests/providers/amazon/aws/sensors/test_emr_base.py b/providers/tests/amazon/aws/sensors/test_emr_base.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_emr_base.py rename to providers/tests/amazon/aws/sensors/test_emr_base.py diff --git a/tests/providers/amazon/aws/sensors/test_emr_containers.py b/providers/tests/amazon/aws/sensors/test_emr_containers.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_emr_containers.py rename to providers/tests/amazon/aws/sensors/test_emr_containers.py diff --git a/tests/providers/amazon/aws/sensors/test_emr_job_flow.py b/providers/tests/amazon/aws/sensors/test_emr_job_flow.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_emr_job_flow.py rename to providers/tests/amazon/aws/sensors/test_emr_job_flow.py diff --git a/tests/providers/amazon/aws/sensors/test_emr_notebook_execution.py b/providers/tests/amazon/aws/sensors/test_emr_notebook_execution.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_emr_notebook_execution.py rename to providers/tests/amazon/aws/sensors/test_emr_notebook_execution.py diff --git a/tests/providers/amazon/aws/sensors/test_emr_serverless_application.py b/providers/tests/amazon/aws/sensors/test_emr_serverless_application.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_emr_serverless_application.py rename to providers/tests/amazon/aws/sensors/test_emr_serverless_application.py diff --git a/tests/providers/amazon/aws/sensors/test_emr_serverless_job.py b/providers/tests/amazon/aws/sensors/test_emr_serverless_job.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_emr_serverless_job.py rename to providers/tests/amazon/aws/sensors/test_emr_serverless_job.py diff --git a/tests/providers/amazon/aws/sensors/test_emr_step.py b/providers/tests/amazon/aws/sensors/test_emr_step.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_emr_step.py rename to providers/tests/amazon/aws/sensors/test_emr_step.py diff --git a/tests/providers/amazon/aws/sensors/test_glacier.py 
b/providers/tests/amazon/aws/sensors/test_glacier.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_glacier.py rename to providers/tests/amazon/aws/sensors/test_glacier.py diff --git a/tests/providers/amazon/aws/sensors/test_glue.py b/providers/tests/amazon/aws/sensors/test_glue.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_glue.py rename to providers/tests/amazon/aws/sensors/test_glue.py diff --git a/tests/providers/amazon/aws/sensors/test_glue_catalog_partition.py b/providers/tests/amazon/aws/sensors/test_glue_catalog_partition.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_glue_catalog_partition.py rename to providers/tests/amazon/aws/sensors/test_glue_catalog_partition.py diff --git a/tests/providers/amazon/aws/sensors/test_glue_crawler.py b/providers/tests/amazon/aws/sensors/test_glue_crawler.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_glue_crawler.py rename to providers/tests/amazon/aws/sensors/test_glue_crawler.py diff --git a/tests/providers/amazon/aws/sensors/test_glue_data_quality.py b/providers/tests/amazon/aws/sensors/test_glue_data_quality.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_glue_data_quality.py rename to providers/tests/amazon/aws/sensors/test_glue_data_quality.py diff --git a/tests/providers/amazon/aws/sensors/test_kinesis_analytics.py b/providers/tests/amazon/aws/sensors/test_kinesis_analytics.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_kinesis_analytics.py rename to providers/tests/amazon/aws/sensors/test_kinesis_analytics.py diff --git a/tests/providers/amazon/aws/sensors/test_lambda_function.py b/providers/tests/amazon/aws/sensors/test_lambda_function.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_lambda_function.py rename to providers/tests/amazon/aws/sensors/test_lambda_function.py diff --git a/tests/providers/amazon/aws/sensors/test_opensearch_serverless.py b/providers/tests/amazon/aws/sensors/test_opensearch_serverless.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_opensearch_serverless.py rename to providers/tests/amazon/aws/sensors/test_opensearch_serverless.py diff --git a/tests/providers/amazon/aws/sensors/test_quicksight.py b/providers/tests/amazon/aws/sensors/test_quicksight.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_quicksight.py rename to providers/tests/amazon/aws/sensors/test_quicksight.py diff --git a/tests/providers/amazon/aws/sensors/test_rds.py b/providers/tests/amazon/aws/sensors/test_rds.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_rds.py rename to providers/tests/amazon/aws/sensors/test_rds.py diff --git a/tests/providers/amazon/aws/sensors/test_redshift_cluster.py b/providers/tests/amazon/aws/sensors/test_redshift_cluster.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_redshift_cluster.py rename to providers/tests/amazon/aws/sensors/test_redshift_cluster.py diff --git a/tests/providers/amazon/aws/sensors/test_s3.py b/providers/tests/amazon/aws/sensors/test_s3.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_s3.py rename to providers/tests/amazon/aws/sensors/test_s3.py diff --git a/tests/providers/amazon/aws/sensors/test_sagemaker_automl.py b/providers/tests/amazon/aws/sensors/test_sagemaker_automl.py similarity index 100% rename from 
tests/providers/amazon/aws/sensors/test_sagemaker_automl.py rename to providers/tests/amazon/aws/sensors/test_sagemaker_automl.py diff --git a/tests/providers/amazon/aws/sensors/test_sagemaker_base.py b/providers/tests/amazon/aws/sensors/test_sagemaker_base.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_sagemaker_base.py rename to providers/tests/amazon/aws/sensors/test_sagemaker_base.py diff --git a/tests/providers/amazon/aws/sensors/test_sagemaker_endpoint.py b/providers/tests/amazon/aws/sensors/test_sagemaker_endpoint.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_sagemaker_endpoint.py rename to providers/tests/amazon/aws/sensors/test_sagemaker_endpoint.py diff --git a/tests/providers/amazon/aws/sensors/test_sagemaker_pipeline.py b/providers/tests/amazon/aws/sensors/test_sagemaker_pipeline.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_sagemaker_pipeline.py rename to providers/tests/amazon/aws/sensors/test_sagemaker_pipeline.py diff --git a/tests/providers/amazon/aws/sensors/test_sagemaker_training.py b/providers/tests/amazon/aws/sensors/test_sagemaker_training.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_sagemaker_training.py rename to providers/tests/amazon/aws/sensors/test_sagemaker_training.py diff --git a/tests/providers/amazon/aws/sensors/test_sagemaker_transform.py b/providers/tests/amazon/aws/sensors/test_sagemaker_transform.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_sagemaker_transform.py rename to providers/tests/amazon/aws/sensors/test_sagemaker_transform.py diff --git a/tests/providers/amazon/aws/sensors/test_sagemaker_tuning.py b/providers/tests/amazon/aws/sensors/test_sagemaker_tuning.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_sagemaker_tuning.py rename to providers/tests/amazon/aws/sensors/test_sagemaker_tuning.py diff --git a/tests/providers/amazon/aws/sensors/test_sqs.py b/providers/tests/amazon/aws/sensors/test_sqs.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_sqs.py rename to providers/tests/amazon/aws/sensors/test_sqs.py diff --git a/tests/providers/amazon/aws/sensors/test_step_function.py b/providers/tests/amazon/aws/sensors/test_step_function.py similarity index 100% rename from tests/providers/amazon/aws/sensors/test_step_function.py rename to providers/tests/amazon/aws/sensors/test_step_function.py diff --git a/tests/integration/providers/ydb/operators/__init__.py b/providers/tests/amazon/aws/system/__init__.py similarity index 100% rename from tests/integration/providers/ydb/operators/__init__.py rename to providers/tests/amazon/aws/system/__init__.py diff --git a/tests/providers/__init__.py b/providers/tests/amazon/aws/system/utils/__init__.py similarity index 100% rename from tests/providers/__init__.py rename to providers/tests/amazon/aws/system/utils/__init__.py diff --git a/tests/providers/amazon/aws/system/utils/test_helpers.py b/providers/tests/amazon/aws/system/utils/test_helpers.py similarity index 96% rename from tests/providers/amazon/aws/system/utils/test_helpers.py rename to providers/tests/amazon/aws/system/utils/test_helpers.py index f48de1788b74c..20324ebca945f 100644 --- a/tests/providers/amazon/aws/system/utils/test_helpers.py +++ b/providers/tests/amazon/aws/system/utils/test_helpers.py @@ -16,7 +16,7 @@ # under the License. 
""" This module contains the unit tests for the helper methods included in the Amazon System Tests found at -tests/system/providers/amazon/aws/utils/__init__.py +providers/tests/system/amazon/aws/utils/__init__.py """ from __future__ import annotations @@ -29,8 +29,8 @@ import pytest from moto import mock_aws -from tests.system.providers.amazon.aws import utils -from tests.system.providers.amazon.aws.utils import ( +from providers.tests.system.amazon.aws import utils +from providers.tests.system.amazon.aws.utils import ( DEFAULT_ENV_ID_LEN, DEFAULT_ENV_ID_PREFIX, ENV_ID_ENVIRON_KEY, diff --git a/tests/providers/airbyte/__init__.py b/providers/tests/amazon/aws/transfers/__init__.py similarity index 100% rename from tests/providers/airbyte/__init__.py rename to providers/tests/amazon/aws/transfers/__init__.py diff --git a/tests/providers/amazon/aws/transfers/test_azure_blob_to_s3.py b/providers/tests/amazon/aws/transfers/test_azure_blob_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_azure_blob_to_s3.py rename to providers/tests/amazon/aws/transfers/test_azure_blob_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_base.py b/providers/tests/amazon/aws/transfers/test_base.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_base.py rename to providers/tests/amazon/aws/transfers/test_base.py diff --git a/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py b/providers/tests/amazon/aws/transfers/test_dynamodb_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py rename to providers/tests/amazon/aws/transfers/test_dynamodb_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_exasol_to_s3.py b/providers/tests/amazon/aws/transfers/test_exasol_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_exasol_to_s3.py rename to providers/tests/amazon/aws/transfers/test_exasol_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_ftp_to_s3.py b/providers/tests/amazon/aws/transfers/test_ftp_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_ftp_to_s3.py rename to providers/tests/amazon/aws/transfers/test_ftp_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_gcs_to_s3.py b/providers/tests/amazon/aws/transfers/test_gcs_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_gcs_to_s3.py rename to providers/tests/amazon/aws/transfers/test_gcs_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_glacier_to_gcs.py b/providers/tests/amazon/aws/transfers/test_glacier_to_gcs.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_glacier_to_gcs.py rename to providers/tests/amazon/aws/transfers/test_glacier_to_gcs.py diff --git a/tests/providers/amazon/aws/transfers/test_google_api_to_s3.py b/providers/tests/amazon/aws/transfers/test_google_api_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_google_api_to_s3.py rename to providers/tests/amazon/aws/transfers/test_google_api_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py b/providers/tests/amazon/aws/transfers/test_hive_to_dynamodb.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py rename to providers/tests/amazon/aws/transfers/test_hive_to_dynamodb.py diff --git a/tests/providers/amazon/aws/transfers/test_http_to_s3.py 
b/providers/tests/amazon/aws/transfers/test_http_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_http_to_s3.py rename to providers/tests/amazon/aws/transfers/test_http_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py b/providers/tests/amazon/aws/transfers/test_imap_attachment_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py rename to providers/tests/amazon/aws/transfers/test_imap_attachment_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_local_to_s3.py b/providers/tests/amazon/aws/transfers/test_local_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_local_to_s3.py rename to providers/tests/amazon/aws/transfers/test_local_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py b/providers/tests/amazon/aws/transfers/test_mongo_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_mongo_to_s3.py rename to providers/tests/amazon/aws/transfers/test_mongo_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py b/providers/tests/amazon/aws/transfers/test_redshift_to_s3.py similarity index 99% rename from tests/providers/amazon/aws/transfers/test_redshift_to_s3.py rename to providers/tests/amazon/aws/transfers/test_redshift_to_s3.py index 2d28acd22e7e6..e27ff2d0aa56d 100644 --- a/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py +++ b/providers/tests/amazon/aws/transfers/test_redshift_to_s3.py @@ -27,7 +27,8 @@ from airflow.models.connection import Connection from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator from airflow.providers.amazon.aws.utils.redshift import build_credentials_block -from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces + +from dev.tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces class TestRedshiftToS3Transfer: diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_dynamodb.py b/providers/tests/amazon/aws/transfers/test_s3_to_dynamodb.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_s3_to_dynamodb.py rename to providers/tests/amazon/aws/transfers/test_s3_to_dynamodb.py diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_ftp.py b/providers/tests/amazon/aws/transfers/test_s3_to_ftp.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_s3_to_ftp.py rename to providers/tests/amazon/aws/transfers/test_s3_to_ftp.py diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_redshift.py b/providers/tests/amazon/aws/transfers/test_s3_to_redshift.py similarity index 99% rename from tests/providers/amazon/aws/transfers/test_s3_to_redshift.py rename to providers/tests/amazon/aws/transfers/test_s3_to_redshift.py index b80c5991626c0..6e300791e0935 100644 --- a/tests/providers/amazon/aws/transfers/test_s3_to_redshift.py +++ b/providers/tests/amazon/aws/transfers/test_s3_to_redshift.py @@ -33,7 +33,8 @@ SchemaDatasetFacet, SchemaDatasetFacetFields, ) -from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces + +from dev.tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces class TestS3ToRedshiftTransfer: diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py b/providers/tests/amazon/aws/transfers/test_s3_to_sftp.py similarity index 98% rename from tests/providers/amazon/aws/transfers/test_s3_to_sftp.py rename to 
providers/tests/amazon/aws/transfers/test_s3_to_sftp.py index 58f8a2d6f0f74..545398d9666f8 100644 --- a/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py +++ b/providers/tests/amazon/aws/transfers/test_s3_to_sftp.py @@ -27,7 +27,8 @@ from airflow.providers.ssh.hooks.ssh import SSHHook from airflow.providers.ssh.operators.ssh import SSHOperator from airflow.utils.timezone import datetime -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_sql.py b/providers/tests/amazon/aws/transfers/test_s3_to_sql.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_s3_to_sql.py rename to providers/tests/amazon/aws/transfers/test_s3_to_sql.py diff --git a/tests/providers/amazon/aws/transfers/test_salesforce_to_s3.py b/providers/tests/amazon/aws/transfers/test_salesforce_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_salesforce_to_s3.py rename to providers/tests/amazon/aws/transfers/test_salesforce_to_s3.py diff --git a/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py b/providers/tests/amazon/aws/transfers/test_sftp_to_s3.py similarity index 98% rename from tests/providers/amazon/aws/transfers/test_sftp_to_s3.py rename to providers/tests/amazon/aws/transfers/test_sftp_to_s3.py index be438a85a4950..e4afe5c8c1efb 100644 --- a/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py +++ b/providers/tests/amazon/aws/transfers/test_sftp_to_s3.py @@ -27,7 +27,8 @@ from airflow.providers.ssh.hooks.ssh import SSHHook from airflow.providers.ssh.operators.ssh import SSHOperator from airflow.utils.timezone import datetime -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/providers/amazon/aws/transfers/test_sql_to_s3.py b/providers/tests/amazon/aws/transfers/test_sql_to_s3.py similarity index 100% rename from tests/providers/amazon/aws/transfers/test_sql_to_s3.py rename to providers/tests/amazon/aws/transfers/test_sql_to_s3.py diff --git a/tests/providers/airbyte/hooks/__init__.py b/providers/tests/amazon/aws/triggers/__init__.py similarity index 100% rename from tests/providers/airbyte/hooks/__init__.py rename to providers/tests/amazon/aws/triggers/__init__.py diff --git a/tests/providers/amazon/aws/triggers/test_athena.py b/providers/tests/amazon/aws/triggers/test_athena.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_athena.py rename to providers/tests/amazon/aws/triggers/test_athena.py diff --git a/tests/providers/amazon/aws/triggers/test_base.py b/providers/tests/amazon/aws/triggers/test_base.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_base.py rename to providers/tests/amazon/aws/triggers/test_base.py diff --git a/tests/providers/amazon/aws/triggers/test_batch.py b/providers/tests/amazon/aws/triggers/test_batch.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_batch.py rename to providers/tests/amazon/aws/triggers/test_batch.py diff --git a/tests/providers/amazon/aws/triggers/test_bedrock.py b/providers/tests/amazon/aws/triggers/test_bedrock.py similarity index 99% rename from tests/providers/amazon/aws/triggers/test_bedrock.py rename to providers/tests/amazon/aws/triggers/test_bedrock.py index 90112d8c1dc1d..a619d39b1307c 100644 --- a/tests/providers/amazon/aws/triggers/test_bedrock.py +++ 
b/providers/tests/amazon/aws/triggers/test_bedrock.py @@ -29,7 +29,8 @@ BedrockProvisionModelThroughputCompletedTrigger, ) from airflow.triggers.base import TriggerEvent -from tests.providers.amazon.aws.utils.test_waiter import assert_expected_waiter_type + +from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type BASE_TRIGGER_CLASSPATH = "airflow.providers.amazon.aws.triggers.bedrock." diff --git a/tests/providers/amazon/aws/triggers/test_comprehend.py b/providers/tests/amazon/aws/triggers/test_comprehend.py similarity index 98% rename from tests/providers/amazon/aws/triggers/test_comprehend.py rename to providers/tests/amazon/aws/triggers/test_comprehend.py index f70d9fc0d696f..c7a53e2d70fce 100644 --- a/tests/providers/amazon/aws/triggers/test_comprehend.py +++ b/providers/tests/amazon/aws/triggers/test_comprehend.py @@ -27,7 +27,8 @@ ComprehendPiiEntitiesDetectionJobCompletedTrigger, ) from airflow.triggers.base import TriggerEvent -from tests.providers.amazon.aws.utils.test_waiter import assert_expected_waiter_type + +from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type BASE_TRIGGER_CLASSPATH = "airflow.providers.amazon.aws.triggers.comprehend." diff --git a/tests/providers/amazon/aws/triggers/test_ec2.py b/providers/tests/amazon/aws/triggers/test_ec2.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_ec2.py rename to providers/tests/amazon/aws/triggers/test_ec2.py diff --git a/tests/providers/amazon/aws/triggers/test_ecs.py b/providers/tests/amazon/aws/triggers/test_ecs.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_ecs.py rename to providers/tests/amazon/aws/triggers/test_ecs.py diff --git a/tests/providers/amazon/aws/triggers/test_eks.py b/providers/tests/amazon/aws/triggers/test_eks.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_eks.py rename to providers/tests/amazon/aws/triggers/test_eks.py diff --git a/tests/providers/amazon/aws/triggers/test_emr.py b/providers/tests/amazon/aws/triggers/test_emr.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_emr.py rename to providers/tests/amazon/aws/triggers/test_emr.py diff --git a/tests/providers/amazon/aws/triggers/test_glue.py b/providers/tests/amazon/aws/triggers/test_glue.py similarity index 99% rename from tests/providers/amazon/aws/triggers/test_glue.py rename to providers/tests/amazon/aws/triggers/test_glue.py index 2ae3e57084ea1..e39e38d8b7609 100644 --- a/tests/providers/amazon/aws/triggers/test_glue.py +++ b/providers/tests/amazon/aws/triggers/test_glue.py @@ -32,7 +32,8 @@ GlueJobCompleteTrigger, ) from airflow.triggers.base import TriggerEvent -from tests.providers.amazon.aws.utils.test_waiter import assert_expected_waiter_type + +from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type BASE_TRIGGER_CLASSPATH = "airflow.providers.amazon.aws.triggers.glue." 
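For orientation, the helper whose import path changes throughout these trigger-test hunks, assert_expected_waiter_type, verifies which boto3 waiter a trigger requested from its hook. A minimal sketch of the idea, assuming the test patches get_waiter with a MagicMock and inspects its recorded call args afterwards; the actual helper in providers/tests/amazon/aws/utils/test_waiter.py may differ in detail:

from unittest import mock


def assert_expected_waiter_type(waiter: mock.MagicMock, expected: str) -> None:
    # Illustrative sketch: the patched get_waiter() mock records its call args,
    # so we can assert that the trigger asked for the expected waiter name.
    assert expected in str(waiter.call_args.args[0])


# Hypothetical usage inside a trigger test:
get_waiter = mock.MagicMock()
get_waiter("crawler_ready")  # stands in for the call the trigger under test would make
assert_expected_waiter_type(get_waiter, "crawler_ready")
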
diff --git a/tests/providers/amazon/aws/triggers/test_glue_crawler.py b/providers/tests/amazon/aws/triggers/test_glue_crawler.py similarity index 97% rename from tests/providers/amazon/aws/triggers/test_glue_crawler.py rename to providers/tests/amazon/aws/triggers/test_glue_crawler.py index 8975aa1aff505..fadc14fa0f271 100644 --- a/tests/providers/amazon/aws/triggers/test_glue_crawler.py +++ b/providers/tests/amazon/aws/triggers/test_glue_crawler.py @@ -24,7 +24,8 @@ from airflow.providers.amazon.aws.hooks.glue_crawler import GlueCrawlerHook from airflow.providers.amazon.aws.triggers.glue_crawler import GlueCrawlerCompleteTrigger from airflow.triggers.base import TriggerEvent -from tests.providers.amazon.aws.utils.test_waiter import assert_expected_waiter_type + +from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type class TestGlueCrawlerCompleteTrigger: diff --git a/tests/providers/amazon/aws/triggers/test_glue_databrew.py b/providers/tests/amazon/aws/triggers/test_glue_databrew.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_glue_databrew.py rename to providers/tests/amazon/aws/triggers/test_glue_databrew.py diff --git a/tests/providers/amazon/aws/triggers/test_kinesis_analytics.py b/providers/tests/amazon/aws/triggers/test_kinesis_analytics.py similarity index 98% rename from tests/providers/amazon/aws/triggers/test_kinesis_analytics.py rename to providers/tests/amazon/aws/triggers/test_kinesis_analytics.py index 3692905f22999..6a0f321545060 100644 --- a/tests/providers/amazon/aws/triggers/test_kinesis_analytics.py +++ b/providers/tests/amazon/aws/triggers/test_kinesis_analytics.py @@ -26,7 +26,8 @@ KinesisAnalyticsV2ApplicationOperationCompleteTrigger, ) from airflow.triggers.base import TriggerEvent -from tests.providers.amazon.aws.utils.test_waiter import assert_expected_waiter_type + +from providers.tests.amazon.aws.utils.test_waiter import assert_expected_waiter_type BASE_TRIGGER_CLASSPATH = "airflow.providers.amazon.aws.triggers.kinesis_analytics." diff --git a/tests/providers/amazon/aws/triggers/test_lambda_function.py b/providers/tests/amazon/aws/triggers/test_lambda_function.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_lambda_function.py rename to providers/tests/amazon/aws/triggers/test_lambda_function.py diff --git a/tests/providers/amazon/aws/triggers/test_neptune.py b/providers/tests/amazon/aws/triggers/test_neptune.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_neptune.py rename to providers/tests/amazon/aws/triggers/test_neptune.py diff --git a/tests/providers/amazon/aws/triggers/test_opensearch_serverless.py b/providers/tests/amazon/aws/triggers/test_opensearch_serverless.py similarity index 98% rename from tests/providers/amazon/aws/triggers/test_opensearch_serverless.py rename to providers/tests/amazon/aws/triggers/test_opensearch_serverless.py index c992d6a50da69..429b406072c58 100644 --- a/tests/providers/amazon/aws/triggers/test_opensearch_serverless.py +++ b/providers/tests/amazon/aws/triggers/test_opensearch_serverless.py @@ -27,7 +27,8 @@ ) from airflow.triggers.base import TriggerEvent from airflow.utils.helpers import prune_dict -from tests.providers.amazon.aws.triggers.test_base import TestAwsBaseWaiterTrigger + +from providers.tests.amazon.aws.triggers.test_base import TestAwsBaseWaiterTrigger BASE_TRIGGER_CLASSPATH = "airflow.providers.amazon.aws.triggers.opensearch_serverless." 
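The BASE_TRIGGER_CLASSPATH constants visible in these hunks feed the standard trigger round-trip assertion: an Airflow trigger serializes to a (dotted classpath, constructor kwargs) tuple, and the tests prepend the module prefix to the class name. A minimal sketch of that pattern using a hypothetical ExampleTrigger; the real modules exercise the provider's own trigger classes:

from __future__ import annotations

from typing import Any, AsyncIterator

from airflow.triggers.base import BaseTrigger, TriggerEvent

BASE_TRIGGER_CLASSPATH = "example.triggers."  # hypothetical prefix, for illustration only


class ExampleTrigger(BaseTrigger):
    def __init__(self, job_id: str) -> None:
        super().__init__()
        self.job_id = job_id

    def serialize(self) -> tuple[str, dict[str, Any]]:
        # Triggers round-trip as (dotted classpath, constructor kwargs).
        return (BASE_TRIGGER_CLASSPATH + "ExampleTrigger", {"job_id": self.job_id})

    async def run(self) -> AsyncIterator[TriggerEvent]:
        yield TriggerEvent({"status": "success", "job_id": self.job_id})


def test_serialization() -> None:
    classpath, kwargs = ExampleTrigger(job_id="j-8989898989").serialize()
    assert classpath == BASE_TRIGGER_CLASSPATH + "ExampleTrigger"
    assert kwargs == {"job_id": "j-8989898989"}
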
diff --git a/tests/providers/amazon/aws/triggers/test_rds.py b/providers/tests/amazon/aws/triggers/test_rds.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_rds.py rename to providers/tests/amazon/aws/triggers/test_rds.py diff --git a/tests/providers/amazon/aws/triggers/test_redshift_cluster.py b/providers/tests/amazon/aws/triggers/test_redshift_cluster.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_redshift_cluster.py rename to providers/tests/amazon/aws/triggers/test_redshift_cluster.py diff --git a/tests/providers/amazon/aws/triggers/test_redshift_data.py b/providers/tests/amazon/aws/triggers/test_redshift_data.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_redshift_data.py rename to providers/tests/amazon/aws/triggers/test_redshift_data.py diff --git a/tests/providers/amazon/aws/triggers/test_s3.py b/providers/tests/amazon/aws/triggers/test_s3.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_s3.py rename to providers/tests/amazon/aws/triggers/test_s3.py diff --git a/tests/providers/amazon/aws/triggers/test_sagemaker.py b/providers/tests/amazon/aws/triggers/test_sagemaker.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_sagemaker.py rename to providers/tests/amazon/aws/triggers/test_sagemaker.py diff --git a/tests/providers/amazon/aws/triggers/test_serialization.py b/providers/tests/amazon/aws/triggers/test_serialization.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_serialization.py rename to providers/tests/amazon/aws/triggers/test_serialization.py diff --git a/tests/providers/amazon/aws/triggers/test_sqs.py b/providers/tests/amazon/aws/triggers/test_sqs.py similarity index 100% rename from tests/providers/amazon/aws/triggers/test_sqs.py rename to providers/tests/amazon/aws/triggers/test_sqs.py diff --git a/tests/providers/airbyte/operators/__init__.py b/providers/tests/amazon/aws/utils/__init__.py similarity index 100% rename from tests/providers/airbyte/operators/__init__.py rename to providers/tests/amazon/aws/utils/__init__.py diff --git a/tests/providers/amazon/aws/utils/eks_test_constants.py b/providers/tests/amazon/aws/utils/eks_test_constants.py similarity index 100% rename from tests/providers/amazon/aws/utils/eks_test_constants.py rename to providers/tests/amazon/aws/utils/eks_test_constants.py diff --git a/tests/providers/amazon/aws/utils/eks_test_utils.py b/providers/tests/amazon/aws/utils/eks_test_utils.py similarity index 99% rename from tests/providers/amazon/aws/utils/eks_test_utils.py rename to providers/tests/amazon/aws/utils/eks_test_utils.py index fc75880f21f41..8bc9bd9e26227 100644 --- a/tests/providers/amazon/aws/utils/eks_test_utils.py +++ b/providers/tests/amazon/aws/utils/eks_test_utils.py @@ -21,7 +21,7 @@ from copy import deepcopy from typing import TYPE_CHECKING, Pattern, Type, Union -from tests.providers.amazon.aws.utils.eks_test_constants import ( +from providers.tests.amazon.aws.utils.eks_test_constants import ( STATUS, ClusterAttributes, ClusterInputs, diff --git a/tests/providers/amazon/aws/utils/test_connection_wrapper.py b/providers/tests/amazon/aws/utils/test_connection_wrapper.py similarity index 100% rename from tests/providers/amazon/aws/utils/test_connection_wrapper.py rename to providers/tests/amazon/aws/utils/test_connection_wrapper.py diff --git a/tests/providers/amazon/aws/utils/test_eks_get_token.py b/providers/tests/amazon/aws/utils/test_eks_get_token.py 
diff --git a/tests/providers/amazon/aws/utils/test_eks_get_token.py b/providers/tests/amazon/aws/utils/test_eks_get_token.py
similarity index 97%
rename from tests/providers/amazon/aws/utils/test_eks_get_token.py
rename to providers/tests/amazon/aws/utils/test_eks_get_token.py
index 672ccfc9b3fd1..1d6300e45851e 100644
--- a/tests/providers/amazon/aws/utils/test_eks_get_token.py
+++ b/providers/tests/amazon/aws/utils/test_eks_get_token.py
@@ -63,13 +63,13 @@ class TestGetEksToken:
             ],
         ],
     )
-    def test_run(self, mock_eks_hook, args, expected_aws_conn_id, expected_region_name, airflow_root_path):
+    def test_run(self, mock_eks_hook, args, expected_aws_conn_id, expected_region_name, providers_src_folder):
         (
             mock_eks_hook.return_value.fetch_access_token_for_cluster.return_value
         ) = "k8s-aws-v1.aHR0cDovL2V4YW1wbGUuY29t"
 
         with mock.patch("sys.argv", args), contextlib.redirect_stdout(StringIO()) as temp_stdout:
-            os.chdir(airflow_root_path)
+            os.chdir(providers_src_folder)
             # We are not using run_module because of https://github.com/pytest-dev/pytest/issues/9007
             runpy.run_path("airflow/providers/amazon/aws/utils/eks_get_token.py", run_name="__main__")
             output = temp_stdout.getvalue()
diff --git a/tests/providers/amazon/aws/utils/test_emailer.py b/providers/tests/amazon/aws/utils/test_emailer.py
similarity index 100%
rename from tests/providers/amazon/aws/utils/test_emailer.py
rename to providers/tests/amazon/aws/utils/test_emailer.py
diff --git a/tests/providers/amazon/aws/utils/test_identifiers.py b/providers/tests/amazon/aws/utils/test_identifiers.py
similarity index 100%
rename from tests/providers/amazon/aws/utils/test_identifiers.py
rename to providers/tests/amazon/aws/utils/test_identifiers.py
diff --git a/tests/providers/amazon/aws/utils/test_mixins.py b/providers/tests/amazon/aws/utils/test_mixins.py
similarity index 100%
rename from tests/providers/amazon/aws/utils/test_mixins.py
rename to providers/tests/amazon/aws/utils/test_mixins.py
diff --git a/tests/providers/amazon/aws/utils/test_openlineage.py b/providers/tests/amazon/aws/utils/test_openlineage.py
similarity index 100%
rename from tests/providers/amazon/aws/utils/test_openlineage.py
rename to providers/tests/amazon/aws/utils/test_openlineage.py
diff --git a/tests/providers/amazon/aws/utils/test_redshift.py b/providers/tests/amazon/aws/utils/test_redshift.py
similarity index 100%
rename from tests/providers/amazon/aws/utils/test_redshift.py
rename to providers/tests/amazon/aws/utils/test_redshift.py
diff --git a/tests/providers/amazon/aws/utils/test_sqs.py b/providers/tests/amazon/aws/utils/test_sqs.py
similarity index 100%
rename from tests/providers/amazon/aws/utils/test_sqs.py
rename to providers/tests/amazon/aws/utils/test_sqs.py
diff --git a/tests/providers/amazon/aws/utils/test_suppress.py b/providers/tests/amazon/aws/utils/test_suppress.py
similarity index 100%
rename from tests/providers/amazon/aws/utils/test_suppress.py
rename to providers/tests/amazon/aws/utils/test_suppress.py
diff --git a/tests/providers/amazon/aws/utils/test_tags.py b/providers/tests/amazon/aws/utils/test_tags.py
similarity index 100%
rename from tests/providers/amazon/aws/utils/test_tags.py
rename to providers/tests/amazon/aws/utils/test_tags.py
diff --git a/tests/providers/amazon/aws/utils/test_task_log_fetcher.py b/providers/tests/amazon/aws/utils/test_task_log_fetcher.py
similarity index 100%
rename from tests/providers/amazon/aws/utils/test_task_log_fetcher.py
rename to providers/tests/amazon/aws/utils/test_task_log_fetcher.py
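The test_eks_get_token.py hunk above swaps the airflow_root_path fixture for providers_src_folder, since the script path is now resolved relative to the providers source tree. A minimal sketch of the invocation pattern the test relies on (the directory passed to os.chdir stands in for the fixture value and is hypothetical):

    import contextlib
    import os
    import runpy
    from io import StringIO

    # Execute the script as if invoked with "python <path>", capturing stdout.
    # run_path is used rather than run_module because of
    # https://github.com/pytest-dev/pytest/issues/9007
    with contextlib.redirect_stdout(StringIO()) as temp_stdout:
        os.chdir("/path/to/providers/src")  # hypothetical fixture value
        runpy.run_path("airflow/providers/amazon/aws/utils/eks_get_token.py", run_name="__main__")
    output = temp_stdout.getvalue()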
diff --git a/tests/providers/amazon/aws/utils/test_template_fields.py b/providers/tests/amazon/aws/utils/test_template_fields.py
similarity index 100%
rename from tests/providers/amazon/aws/utils/test_template_fields.py
rename to providers/tests/amazon/aws/utils/test_template_fields.py
diff --git a/tests/providers/amazon/aws/utils/test_utils.py b/providers/tests/amazon/aws/utils/test_utils.py
similarity index 100%
rename from tests/providers/amazon/aws/utils/test_utils.py
rename to providers/tests/amazon/aws/utils/test_utils.py
diff --git a/tests/providers/amazon/aws/utils/test_waiter.py b/providers/tests/amazon/aws/utils/test_waiter.py
similarity index 100%
rename from tests/providers/amazon/aws/utils/test_waiter.py
rename to providers/tests/amazon/aws/utils/test_waiter.py
diff --git a/tests/providers/amazon/aws/utils/test_waiter_with_logging.py b/providers/tests/amazon/aws/utils/test_waiter_with_logging.py
similarity index 100%
rename from tests/providers/amazon/aws/utils/test_waiter_with_logging.py
rename to providers/tests/amazon/aws/utils/test_waiter_with_logging.py
diff --git a/tests/providers/airbyte/sensors/__init__.py b/providers/tests/amazon/aws/waiters/__init__.py
similarity index 100%
rename from tests/providers/airbyte/sensors/__init__.py
rename to providers/tests/amazon/aws/waiters/__init__.py
diff --git a/tests/providers/amazon/aws/waiters/test.json b/providers/tests/amazon/aws/waiters/test.json
similarity index 100%
rename from tests/providers/amazon/aws/waiters/test.json
rename to providers/tests/amazon/aws/waiters/test.json
diff --git a/tests/providers/amazon/aws/waiters/test_batch.py b/providers/tests/amazon/aws/waiters/test_batch.py
similarity index 100%
rename from tests/providers/amazon/aws/waiters/test_batch.py
rename to providers/tests/amazon/aws/waiters/test_batch.py
diff --git a/tests/providers/amazon/aws/waiters/test_bedrock.py b/providers/tests/amazon/aws/waiters/test_bedrock.py
similarity index 100%
rename from tests/providers/amazon/aws/waiters/test_bedrock.py
rename to providers/tests/amazon/aws/waiters/test_bedrock.py
diff --git a/tests/providers/amazon/aws/waiters/test_bedrock_agent.py b/providers/tests/amazon/aws/waiters/test_bedrock_agent.py
similarity index 100%
rename from tests/providers/amazon/aws/waiters/test_bedrock_agent.py
rename to providers/tests/amazon/aws/waiters/test_bedrock_agent.py
diff --git a/tests/providers/amazon/aws/waiters/test_comprehend.py b/providers/tests/amazon/aws/waiters/test_comprehend.py
similarity index 100%
rename from tests/providers/amazon/aws/waiters/test_comprehend.py
rename to providers/tests/amazon/aws/waiters/test_comprehend.py
diff --git a/tests/providers/amazon/aws/waiters/test_custom_waiters.py b/providers/tests/amazon/aws/waiters/test_custom_waiters.py
similarity index 100%
rename from tests/providers/amazon/aws/waiters/test_custom_waiters.py
rename to providers/tests/amazon/aws/waiters/test_custom_waiters.py
diff --git a/tests/providers/amazon/aws/waiters/test_dynamo.py b/providers/tests/amazon/aws/waiters/test_dynamo.py
similarity index 100%
rename from tests/providers/amazon/aws/waiters/test_dynamo.py
rename to providers/tests/amazon/aws/waiters/test_dynamo.py
diff --git a/tests/providers/amazon/aws/waiters/test_ecs.py b/providers/tests/amazon/aws/waiters/test_ecs.py
similarity index 100%
rename from tests/providers/amazon/aws/waiters/test_ecs.py
rename to providers/tests/amazon/aws/waiters/test_ecs.py
diff --git a/tests/providers/amazon/aws/waiters/test_eks.py b/providers/tests/amazon/aws/waiters/test_eks.py
similarity index 97%
rename from tests/providers/amazon/aws/waiters/test_eks.py
rename to providers/tests/amazon/aws/waiters/test_eks.py
index 9013c8b7c6fca..6a3aa8cf0273a 100644
--- a/tests/providers/amazon/aws/waiters/test_eks.py
+++ b/providers/tests/amazon/aws/waiters/test_eks.py
@@ -22,7 +22,8 @@
 from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.eks import EksHook
-from tests.providers.amazon.aws.waiters.test_custom_waiters import assert_all_match
+
+from providers.tests.amazon.aws.waiters.test_custom_waiters import assert_all_match
 
 
 class TestCustomEKSServiceWaiters:
diff --git a/tests/providers/amazon/aws/waiters/test_emr.py b/providers/tests/amazon/aws/waiters/test_emr.py
similarity index 100%
rename from tests/providers/amazon/aws/waiters/test_emr.py
rename to providers/tests/amazon/aws/waiters/test_emr.py
diff --git a/tests/providers/amazon/aws/waiters/test_glue.py b/providers/tests/amazon/aws/waiters/test_glue.py
similarity index 100%
rename from tests/providers/amazon/aws/waiters/test_glue.py
rename to providers/tests/amazon/aws/waiters/test_glue.py
diff --git a/tests/providers/amazon/aws/waiters/test_glue_databrew.py b/providers/tests/amazon/aws/waiters/test_glue_databrew.py
similarity index 100%
rename from tests/providers/amazon/aws/waiters/test_glue_databrew.py
rename to providers/tests/amazon/aws/waiters/test_glue_databrew.py
diff --git a/tests/providers/amazon/aws/waiters/test_kinesis_analytics.py b/providers/tests/amazon/aws/waiters/test_kinesis_analytics.py
similarity index 100%
rename from tests/providers/amazon/aws/waiters/test_kinesis_analytics.py
rename to providers/tests/amazon/aws/waiters/test_kinesis_analytics.py
diff --git a/tests/providers/amazon/aws/waiters/test_neptune.py b/providers/tests/amazon/aws/waiters/test_neptune.py
similarity index 100%
rename from tests/providers/amazon/aws/waiters/test_neptune.py
rename to providers/tests/amazon/aws/waiters/test_neptune.py
diff --git a/tests/providers/amazon/aws/waiters/test_opensearch_serverless.py b/providers/tests/amazon/aws/waiters/test_opensearch_serverless.py
similarity index 100%
rename from tests/providers/amazon/aws/waiters/test_opensearch_serverless.py
rename to providers/tests/amazon/aws/waiters/test_opensearch_serverless.py
diff --git a/tests/providers/amazon/conftest.py b/providers/tests/amazon/conftest.py
similarity index 100%
rename from tests/providers/amazon/conftest.py
rename to providers/tests/amazon/conftest.py
diff --git a/tests/providers/airbyte/triggers/__init__.py b/providers/tests/apache/__init__.py
similarity index 100%
rename from tests/providers/airbyte/triggers/__init__.py
rename to providers/tests/apache/__init__.py
diff --git a/tests/providers/alibaba/__init__.py b/providers/tests/apache/beam/__init__.py
similarity index 100%
rename from tests/providers/alibaba/__init__.py
rename to providers/tests/apache/beam/__init__.py
diff --git a/tests/providers/alibaba/cloud/__init__.py b/providers/tests/apache/beam/hooks/__init__.py
similarity index 100%
rename from tests/providers/alibaba/cloud/__init__.py
rename to providers/tests/apache/beam/hooks/__init__.py
diff --git a/tests/providers/apache/beam/hooks/test_beam.py b/providers/tests/apache/beam/hooks/test_beam.py
similarity index 100%
rename from tests/providers/apache/beam/hooks/test_beam.py
rename to providers/tests/apache/beam/hooks/test_beam.py
diff --git a/tests/providers/alibaba/cloud/hooks/__init__.py b/providers/tests/apache/beam/operators/__init__.py
similarity index 100%
rename from tests/providers/alibaba/cloud/hooks/__init__.py
rename to providers/tests/apache/beam/operators/__init__.py
diff --git a/tests/providers/apache/beam/operators/test_beam.py b/providers/tests/apache/beam/operators/test_beam.py
similarity index 100%
rename from tests/providers/apache/beam/operators/test_beam.py
rename to providers/tests/apache/beam/operators/test_beam.py
diff --git a/tests/providers/alibaba/cloud/log/__init__.py b/providers/tests/apache/beam/triggers/__init__.py
similarity index 100%
rename from tests/providers/alibaba/cloud/log/__init__.py
rename to providers/tests/apache/beam/triggers/__init__.py
diff --git a/tests/providers/apache/beam/triggers/test_beam.py b/providers/tests/apache/beam/triggers/test_beam.py
similarity index 100%
rename from tests/providers/apache/beam/triggers/test_beam.py
rename to providers/tests/apache/beam/triggers/test_beam.py
diff --git a/tests/providers/alibaba/cloud/operators/__init__.py b/providers/tests/apache/cassandra/__init__.py
similarity index 100%
rename from tests/providers/alibaba/cloud/operators/__init__.py
rename to providers/tests/apache/cassandra/__init__.py
diff --git a/tests/providers/alibaba/cloud/sensors/__init__.py b/providers/tests/apache/cassandra/sensors/__init__.py
similarity index 100%
rename from tests/providers/alibaba/cloud/sensors/__init__.py
rename to providers/tests/apache/cassandra/sensors/__init__.py
diff --git a/tests/providers/apache/cassandra/sensors/test_record.py b/providers/tests/apache/cassandra/sensors/test_record.py
similarity index 100%
rename from tests/providers/apache/cassandra/sensors/test_record.py
rename to providers/tests/apache/cassandra/sensors/test_record.py
diff --git a/tests/providers/apache/cassandra/sensors/test_table.py b/providers/tests/apache/cassandra/sensors/test_table.py
similarity index 100%
rename from tests/providers/apache/cassandra/sensors/test_table.py
rename to providers/tests/apache/cassandra/sensors/test_table.py
diff --git a/airflow/providers/vertica/operators/__init__.py b/providers/tests/apache/drill/__init__.py
similarity index 100%
rename from airflow/providers/vertica/operators/__init__.py
rename to providers/tests/apache/drill/__init__.py
diff --git a/airflow/providers/zendesk/hooks/__init__.py b/providers/tests/apache/drill/hooks/__init__.py
similarity index 100%
rename from airflow/providers/zendesk/hooks/__init__.py
rename to providers/tests/apache/drill/hooks/__init__.py
diff --git a/tests/providers/apache/drill/hooks/test_drill.py b/providers/tests/apache/drill/hooks/test_drill.py
similarity index 94%
rename from tests/providers/apache/drill/hooks/test_drill.py
rename to providers/tests/apache/drill/hooks/test_drill.py
index bfedffd3d7983..a02b74545531a 100644
--- a/tests/providers/apache/drill/hooks/test_drill.py
+++ b/providers/tests/apache/drill/hooks/test_drill.py
@@ -26,9 +26,10 @@
 
 @pytest.mark.parametrize("host, expect_error", [("host_with?", True), ("good_host", False)])
 def test_get_host(host, expect_error):
-    with patch(
-        "airflow.providers.apache.drill.hooks.drill.DrillHook.get_connection"
-    ) as mock_get_connection, patch("sqlalchemy.engine.base.Engine.raw_connection") as raw_connection:
+    with (
+        patch("airflow.providers.apache.drill.hooks.drill.DrillHook.get_connection") as mock_get_connection,
+        patch("sqlalchemy.engine.base.Engine.raw_connection") as raw_connection,
+    ):
         raw_connection.return_value = MagicMock()
         mock_get_connection.return_value = MagicMock(
             host=host, port=80, login="drill_user", password="secret"
diff --git a/tests/providers/apache/drill/__init__.py b/providers/tests/apache/druid/__init__.py
similarity index 100%
rename from tests/providers/apache/drill/__init__.py
rename to providers/tests/apache/druid/__init__.py
diff --git a/tests/providers/apache/drill/hooks/__init__.py b/providers/tests/apache/druid/hooks/__init__.py
similarity index 100%
rename from tests/providers/apache/drill/hooks/__init__.py
rename to providers/tests/apache/druid/hooks/__init__.py
diff --git a/tests/providers/apache/druid/hooks/test_druid.py b/providers/tests/apache/druid/hooks/test_druid.py
similarity index 100%
rename from tests/providers/apache/druid/hooks/test_druid.py
rename to providers/tests/apache/druid/hooks/test_druid.py
diff --git a/tests/providers/apache/druid/__init__.py b/providers/tests/apache/druid/operators/__init__.py
similarity index 100%
rename from tests/providers/apache/druid/__init__.py
rename to providers/tests/apache/druid/operators/__init__.py
diff --git a/tests/providers/apache/druid/operators/test_druid.py b/providers/tests/apache/druid/operators/test_druid.py
similarity index 100%
rename from tests/providers/apache/druid/operators/test_druid.py
rename to providers/tests/apache/druid/operators/test_druid.py
diff --git a/tests/providers/alibaba/cloud/utils/__init__.py b/providers/tests/apache/druid/transfers/__init__.py
similarity index 100%
rename from tests/providers/alibaba/cloud/utils/__init__.py
rename to providers/tests/apache/druid/transfers/__init__.py
diff --git a/tests/providers/apache/druid/transfers/test_hive_to_druid.py b/providers/tests/apache/druid/transfers/test_hive_to_druid.py
similarity index 100%
rename from tests/providers/apache/druid/transfers/test_hive_to_druid.py
rename to providers/tests/apache/druid/transfers/test_hive_to_druid.py
diff --git a/tests/providers/apache/druid/hooks/__init__.py b/providers/tests/apache/flink/__init__.py
similarity index 100%
rename from tests/providers/apache/druid/hooks/__init__.py
rename to providers/tests/apache/flink/__init__.py
diff --git a/tests/providers/apache/druid/operators/__init__.py b/providers/tests/apache/flink/operators/__init__.py
similarity index 100%
rename from tests/providers/apache/druid/operators/__init__.py
rename to providers/tests/apache/flink/operators/__init__.py
diff --git a/tests/providers/apache/flink/operators/test_flink_kubernetes.py b/providers/tests/apache/flink/operators/test_flink_kubernetes.py
similarity index 100%
rename from tests/providers/apache/flink/operators/test_flink_kubernetes.py
rename to providers/tests/apache/flink/operators/test_flink_kubernetes.py
diff --git a/tests/providers/apache/flink/__init__.py b/providers/tests/apache/flink/sensors/__init__.py
similarity index 100%
rename from tests/providers/apache/flink/__init__.py
rename to providers/tests/apache/flink/sensors/__init__.py
diff --git a/tests/providers/apache/flink/sensors/test_flink_kubernetes.py b/providers/tests/apache/flink/sensors/test_flink_kubernetes.py
similarity index 100%
rename from tests/providers/apache/flink/sensors/test_flink_kubernetes.py
rename to providers/tests/apache/flink/sensors/test_flink_kubernetes.py
diff --git a/tests/providers/apache/flink/operators/__init__.py b/providers/tests/apache/hdfs/__init__.py
similarity index 100%
rename from tests/providers/apache/flink/operators/__init__.py
rename to providers/tests/apache/hdfs/__init__.py
diff --git a/tests/providers/apache/flink/sensors/__init__.py b/providers/tests/apache/hdfs/hooks/__init__.py
similarity index 100%
rename from tests/providers/apache/flink/sensors/__init__.py
rename to providers/tests/apache/hdfs/hooks/__init__.py
diff --git a/tests/providers/apache/hdfs/hooks/test_webhdfs.py b/providers/tests/apache/hdfs/hooks/test_webhdfs.py
similarity index 100%
rename from tests/providers/apache/hdfs/hooks/test_webhdfs.py
rename to providers/tests/apache/hdfs/hooks/test_webhdfs.py
diff --git a/tests/providers/apache/hdfs/__init__.py b/providers/tests/apache/hdfs/sensors/__init__.py
similarity index 100%
rename from tests/providers/apache/hdfs/__init__.py
rename to providers/tests/apache/hdfs/sensors/__init__.py
diff --git a/tests/providers/apache/hdfs/sensors/test_web_hdfs.py b/providers/tests/apache/hdfs/sensors/test_web_hdfs.py
similarity index 100%
rename from tests/providers/apache/hdfs/sensors/test_web_hdfs.py
rename to providers/tests/apache/hdfs/sensors/test_web_hdfs.py
diff --git a/tests/providers/apache/hive/__init__.py b/providers/tests/apache/hive/__init__.py
similarity index 100%
rename from tests/providers/apache/hive/__init__.py
rename to providers/tests/apache/hive/__init__.py
diff --git a/tests/providers/apache/hdfs/hooks/__init__.py b/providers/tests/apache/hive/hooks/__init__.py
similarity index 100%
rename from tests/providers/apache/hdfs/hooks/__init__.py
rename to providers/tests/apache/hive/hooks/__init__.py
diff --git a/tests/providers/apache/hive/hooks/query_results.csv b/providers/tests/apache/hive/hooks/query_results.csv
similarity index 100%
rename from tests/providers/apache/hive/hooks/query_results.csv
rename to providers/tests/apache/hive/hooks/query_results.csv
diff --git a/tests/providers/apache/hive/hooks/test_hive.py b/providers/tests/apache/hive/hooks/test_hive.py
similarity index 98%
rename from tests/providers/apache/hive/hooks/test_hive.py
rename to providers/tests/apache/hive/hooks/test_hive.py
index 93494876175ad..aee09db28088e 100644
--- a/tests/providers/apache/hive/hooks/test_hive.py
+++ b/providers/tests/apache/hive/hooks/test_hive.py
@@ -33,14 +33,15 @@
 from airflow.secrets.environment_variables import CONN_ENV_PREFIX
 from airflow.utils import timezone
 from airflow.utils.operator_helpers import AIRFLOW_VAR_NAME_FORMAT_MAPPING
-from tests.providers.apache.hive import (
+
+from dev.tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces
+from providers.tests.apache.hive import (
     BaseMockConnectionCursor,
     InvalidHiveCliHook,
     MockHiveCliHook,
     MockHiveServer2Hook,
     MockSubProcess,
 )
-from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces
 
 DEFAULT_DATE = timezone.datetime(2015, 1, 1)
 DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
@@ -333,10 +334,11 @@ def setup_method(self):
         self.database = "airflow"
         self.partition_by = "ds"
         self.table = "static_babynames_partitioned"
-        with mock.patch(
-            "airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_metastore_client"
-        ) as get_metastore_mock, mock.patch(
-            "airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_connection"
+        with (
+            mock.patch(
+                "airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_metastore_client"
+            ) as get_metastore_mock,
+            mock.patch("airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_connection"),
         ):
             get_metastore_mock.return_value = mock.MagicMock()
 
@@ -419,11 +421,14 @@ def test_ha_hosts(self, socket_mock):
         assert socket_mock.socket.call_count == 2
 
     def test_get_conn(self):
-        with mock.patch(
-            "airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook._find_valid_host"
-        ) as find_valid_host, mock.patch(
-            "airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_connection"
-        ) as get_connection:
+        with (
+            mock.patch(
+                "airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook._find_valid_host"
+            ) as find_valid_host,
+            mock.patch(
+                "airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_connection"
+            ) as get_connection,
+        ):
             find_valid_host.return_value = mock.MagicMock(return_value="")
             get_connection.return_value = mock.MagicMock(return_value="")
             metastore_hook = HiveMetastoreHook()
diff --git a/tests/providers/apache/hdfs/sensors/__init__.py b/providers/tests/apache/hive/macros/__init__.py
similarity index 100%
rename from tests/providers/apache/hdfs/sensors/__init__.py
rename to providers/tests/apache/hive/macros/__init__.py
diff --git a/tests/providers/apache/hive/macros/test_hive.py b/providers/tests/apache/hive/macros/test_hive.py
similarity index 100%
rename from tests/providers/apache/hive/macros/test_hive.py
rename to providers/tests/apache/hive/macros/test_hive.py
diff --git a/tests/providers/apache/hive/hooks/__init__.py b/providers/tests/apache/hive/operators/__init__.py
similarity index 100%
rename from tests/providers/apache/hive/hooks/__init__.py
rename to providers/tests/apache/hive/operators/__init__.py
diff --git a/tests/providers/apache/hive/operators/test_hive.py b/providers/tests/apache/hive/operators/test_hive.py
similarity index 99%
rename from tests/providers/apache/hive/operators/test_hive.py
rename to providers/tests/apache/hive/operators/test_hive.py
index f02f69c2a482d..cda19f9c2a05c 100644
--- a/tests/providers/apache/hive/operators/test_hive.py
+++ b/providers/tests/apache/hive/operators/test_hive.py
@@ -26,7 +26,8 @@
 from airflow.models import DagRun, TaskInstance
 from airflow.providers.apache.hive.operators.hive import HiveOperator
 from airflow.utils import timezone
-from tests.providers.apache.hive import DEFAULT_DATE, MockSubProcess, TestHiveEnvironment
+
+from providers.tests.apache.hive import DEFAULT_DATE, MockSubProcess, TestHiveEnvironment
 
 
 class HiveOperatorConfigTest(TestHiveEnvironment):
diff --git a/tests/providers/apache/hive/operators/test_hive_stats.py b/providers/tests/apache/hive/operators/test_hive_stats.py
similarity index 99%
rename from tests/providers/apache/hive/operators/test_hive_stats.py
rename to providers/tests/apache/hive/operators/test_hive_stats.py
index e419d2da00b92..cce576515cb68 100644
--- a/tests/providers/apache/hive/operators/test_hive_stats.py
+++ b/providers/tests/apache/hive/operators/test_hive_stats.py
@@ -26,7 +26,8 @@
 from airflow.exceptions import AirflowException
 from airflow.providers.apache.hive.operators.hive_stats import HiveStatsCollectionOperator
 from airflow.providers.presto.hooks.presto import PrestoHook
-from tests.providers.apache.hive import (
+
+from providers.tests.apache.hive import (
     DEFAULT_DATE,
     DEFAULT_DATE_DS,
     MockConnectionCursor,
diff --git a/tests/providers/apache/hive/macros/__init__.py b/providers/tests/apache/hive/sensors/__init__.py
similarity index 100%
rename from tests/providers/apache/hive/macros/__init__.py
rename to providers/tests/apache/hive/sensors/__init__.py
diff --git a/tests/providers/apache/hive/sensors/test_hive_partition.py b/providers/tests/apache/hive/sensors/test_hive_partition.py
similarity index 96%
rename from tests/providers/apache/hive/sensors/test_hive_partition.py
rename to providers/tests/apache/hive/sensors/test_hive_partition.py
index 45e10783d81ea..4df701bfe697b 100644
--- a/tests/providers/apache/hive/sensors/test_hive_partition.py
+++ b/providers/tests/apache/hive/sensors/test_hive_partition.py
@@ -23,7 +23,8 @@
 import pytest
 
 from airflow.providers.apache.hive.sensors.hive_partition import HivePartitionSensor
-from tests.providers.apache.hive import DEFAULT_DATE, MockHiveMetastoreHook, TestHiveEnvironment
+
+from providers.tests.apache.hive import DEFAULT_DATE, MockHiveMetastoreHook, TestHiveEnvironment
 
 
 @pytest.mark.skipif(
diff --git a/tests/providers/apache/hive/sensors/test_metastore_partition.py b/providers/tests/apache/hive/sensors/test_metastore_partition.py
similarity index 96%
rename from tests/providers/apache/hive/sensors/test_metastore_partition.py
rename to providers/tests/apache/hive/sensors/test_metastore_partition.py
index 04a251339dc30..3acb19678a5d8 100644
--- a/tests/providers/apache/hive/sensors/test_metastore_partition.py
+++ b/providers/tests/apache/hive/sensors/test_metastore_partition.py
@@ -23,7 +23,8 @@
 import pytest
 
 from airflow.providers.apache.hive.sensors.metastore_partition import MetastorePartitionSensor
-from tests.providers.apache.hive import DEFAULT_DATE, DEFAULT_DATE_DS, MockDBConnection, TestHiveEnvironment
+
+from providers.tests.apache.hive import DEFAULT_DATE, DEFAULT_DATE_DS, MockDBConnection, TestHiveEnvironment
 
 
 @pytest.mark.skipif(
diff --git a/tests/providers/apache/hive/sensors/test_named_hive_partition.py b/providers/tests/apache/hive/sensors/test_named_hive_partition.py
similarity index 99%
rename from tests/providers/apache/hive/sensors/test_named_hive_partition.py
rename to providers/tests/apache/hive/sensors/test_named_hive_partition.py
index 01827692273a6..ff565c733d8bf 100644
--- a/tests/providers/apache/hive/sensors/test_named_hive_partition.py
+++ b/providers/tests/apache/hive/sensors/test_named_hive_partition.py
@@ -27,7 +27,8 @@
 from airflow.models.dag import DAG
 from airflow.providers.apache.hive.sensors.named_hive_partition import NamedHivePartitionSensor
 from airflow.utils.timezone import datetime
-from tests.providers.apache.hive import MockHiveMetastoreHook, TestHiveEnvironment
+
+from providers.tests.apache.hive import MockHiveMetastoreHook, TestHiveEnvironment
 
 DEFAULT_DATE = datetime(2015, 1, 1)
 DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
diff --git a/tests/providers/amazon/__init__.py b/providers/tests/apache/hive/transfers/__init__.py
similarity index 100%
rename from tests/providers/amazon/__init__.py
rename to providers/tests/apache/hive/transfers/__init__.py
diff --git a/tests/providers/apache/hive/transfers/test_hive_to_mysql.py b/providers/tests/apache/hive/transfers/test_hive_to_mysql.py
similarity index 99%
rename from tests/providers/apache/hive/transfers/test_hive_to_mysql.py
rename to providers/tests/apache/hive/transfers/test_hive_to_mysql.py
index a7e2cbe0e0d6d..f7a4e9c013013 100644
--- a/tests/providers/apache/hive/transfers/test_hive_to_mysql.py
+++ b/providers/tests/apache/hive/transfers/test_hive_to_mysql.py
@@ -26,7 +26,8 @@
 from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator
 from airflow.utils import timezone
 from airflow.utils.operator_helpers import context_to_airflow_vars
-from tests.providers.apache.hive import MockHiveServer2Hook, MockMySqlHook, TestHiveEnvironment
+
+from providers.tests.apache.hive import MockHiveServer2Hook, MockMySqlHook, TestHiveEnvironment
 
 
 DEFAULT_DATE = timezone.datetime(2015, 1, 1)
diff --git a/tests/providers/apache/hive/transfers/test_hive_to_samba.py b/providers/tests/apache/hive/transfers/test_hive_to_samba.py
similarity index 99%
rename from tests/providers/apache/hive/transfers/test_hive_to_samba.py
rename to providers/tests/apache/hive/transfers/test_hive_to_samba.py
index 7eb50e44b337e..7225e732db812 100644
--- a/tests/providers/apache/hive/transfers/test_hive_to_samba.py
+++ b/providers/tests/apache/hive/transfers/test_hive_to_samba.py
@@ -25,7 +25,8 @@
 from airflow.providers.apache.hive.transfers.hive_to_samba import HiveToSambaOperator
 from airflow.providers.samba.hooks.samba import SambaHook
 from airflow.utils.operator_helpers import context_to_airflow_vars
-from tests.providers.apache.hive import (
+
+from providers.tests.apache.hive import (
     DEFAULT_DATE,
     MockConnectionCursor,
     MockHiveServer2Hook,
diff --git a/tests/providers/apache/hive/transfers/test_mssql_to_hive.py b/providers/tests/apache/hive/transfers/test_mssql_to_hive.py
similarity index 100%
rename from tests/providers/apache/hive/transfers/test_mssql_to_hive.py
rename to providers/tests/apache/hive/transfers/test_mssql_to_hive.py
diff --git a/tests/providers/apache/hive/transfers/test_mysql_to_hive.py b/providers/tests/apache/hive/transfers/test_mysql_to_hive.py
similarity index 100%
rename from tests/providers/apache/hive/transfers/test_mysql_to_hive.py
rename to providers/tests/apache/hive/transfers/test_mysql_to_hive.py
diff --git a/tests/providers/apache/hive/transfers/test_s3_to_hive.py b/providers/tests/apache/hive/transfers/test_s3_to_hive.py
similarity index 100%
rename from tests/providers/apache/hive/transfers/test_s3_to_hive.py
rename to providers/tests/apache/hive/transfers/test_s3_to_hive.py
diff --git a/tests/providers/apache/hive/transfers/test_vertica_to_hive.py b/providers/tests/apache/hive/transfers/test_vertica_to_hive.py
similarity index 100%
rename from tests/providers/apache/hive/transfers/test_vertica_to_hive.py
rename to providers/tests/apache/hive/transfers/test_vertica_to_hive.py
diff --git a/tests/providers/amazon/aws/__init__.py b/providers/tests/apache/iceberg/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/__init__.py
rename to providers/tests/apache/iceberg/__init__.py
diff --git a/tests/providers/amazon/aws/assets/__init__.py b/providers/tests/apache/iceberg/hooks/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/assets/__init__.py
rename to providers/tests/apache/iceberg/hooks/__init__.py
diff --git a/tests/providers/apache/iceberg/hooks/test_iceberg.py b/providers/tests/apache/iceberg/hooks/test_iceberg.py
similarity index 100%
rename from tests/providers/apache/iceberg/hooks/test_iceberg.py
rename to providers/tests/apache/iceberg/hooks/test_iceberg.py
diff --git a/tests/providers/amazon/aws/auth_manager/__init__.py b/providers/tests/apache/impala/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/auth_manager/__init__.py
rename to providers/tests/apache/impala/__init__.py
diff --git a/tests/providers/amazon/aws/auth_manager/avp/__init__.py b/providers/tests/apache/impala/hooks/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/auth_manager/avp/__init__.py
rename to providers/tests/apache/impala/hooks/__init__.py
diff --git a/tests/providers/apache/impala/hooks/test_impala.py b/providers/tests/apache/impala/hooks/test_impala.py
similarity index 100%
rename from tests/providers/apache/impala/hooks/test_impala.py
rename to providers/tests/apache/impala/hooks/test_impala.py
diff --git a/tests/providers/amazon/aws/auth_manager/cli/__init__.py b/providers/tests/apache/kafka/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/auth_manager/cli/__init__.py
rename to providers/tests/apache/kafka/__init__.py
diff --git a/tests/providers/amazon/aws/auth_manager/security_manager/__init__.py b/providers/tests/apache/kafka/hooks/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/auth_manager/security_manager/__init__.py
rename to providers/tests/apache/kafka/hooks/__init__.py
diff --git a/tests/providers/apache/kafka/hooks/test_base.py b/providers/tests/apache/kafka/hooks/test_base.py
similarity index 100%
rename from tests/providers/apache/kafka/hooks/test_base.py
rename to providers/tests/apache/kafka/hooks/test_base.py
diff --git a/tests/providers/apache/kafka/hooks/test_client.py b/providers/tests/apache/kafka/hooks/test_client.py
similarity index 100%
rename from tests/providers/apache/kafka/hooks/test_client.py
rename to providers/tests/apache/kafka/hooks/test_client.py
diff --git a/tests/providers/apache/kafka/hooks/test_consume.py b/providers/tests/apache/kafka/hooks/test_consume.py
similarity index 100%
rename from tests/providers/apache/kafka/hooks/test_consume.py
rename to providers/tests/apache/kafka/hooks/test_consume.py
diff --git a/tests/providers/apache/kafka/hooks/test_produce.py b/providers/tests/apache/kafka/hooks/test_produce.py
similarity index 100%
rename from tests/providers/apache/kafka/hooks/test_produce.py
rename to providers/tests/apache/kafka/hooks/test_produce.py
diff --git a/tests/providers/amazon/aws/auth_manager/views/__init__.py b/providers/tests/apache/kafka/operators/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/auth_manager/views/__init__.py
rename to providers/tests/apache/kafka/operators/__init__.py
diff --git a/tests/providers/apache/kafka/operators/test_consume.py b/providers/tests/apache/kafka/operators/test_consume.py
similarity index 98%
rename from tests/providers/apache/kafka/operators/test_consume.py
rename to providers/tests/apache/kafka/operators/test_consume.py
index 699e0abe0643c..7e02a273894b3 100644
--- a/tests/providers/apache/kafka/operators/test_consume.py
+++ b/providers/tests/apache/kafka/operators/test_consume.py
@@ -64,7 +64,7 @@ def test_operator(self):
         operator = ConsumeFromTopicOperator(
             kafka_config_id="kafka_d",
             topics=["test"],
-            apply_function="tests.providers.apache.kafka.operators.test_consume._no_op",
+            apply_function="providers.tests.apache.kafka.operators.test_consume._no_op",
             task_id="test",
             poll_timeout=0.0001,
         )
diff --git a/tests/providers/apache/kafka/operators/test_produce.py b/providers/tests/apache/kafka/operators/test_produce.py
similarity index 97%
rename from tests/providers/apache/kafka/operators/test_produce.py
rename to providers/tests/apache/kafka/operators/test_produce.py
index 49559984f1d0a..6c1b853ecef90 100644
--- a/tests/providers/apache/kafka/operators/test_produce.py
+++ b/providers/tests/apache/kafka/operators/test_produce.py
@@ -69,7 +69,7 @@ def test_operator_string(self):
         operator = ProduceToTopicOperator(
             kafka_config_id="kafka_d",
             topic="test_1",
-            producer_function="tests.providers.apache.kafka.operators.test_produce._simple_producer",
+            producer_function="providers.tests.apache.kafka.operators.test_produce._simple_producer",
             producer_function_args=(b"test", b"test"),
             task_id="test",
             synchronous=False,
diff --git a/tests/providers/amazon/aws/config_templates/__init__.py b/providers/tests/apache/kafka/sensors/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/config_templates/__init__.py
rename to providers/tests/apache/kafka/sensors/__init__.py
diff --git a/tests/providers/apache/kafka/sensors/test_kafka.py b/providers/tests/apache/kafka/sensors/test_kafka.py
similarity index 100%
rename from tests/providers/apache/kafka/sensors/test_kafka.py
rename to providers/tests/apache/kafka/sensors/test_kafka.py
diff --git a/tests/providers/amazon/aws/executors/__init__.py b/providers/tests/apache/kafka/triggers/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/executors/__init__.py
rename to providers/tests/apache/kafka/triggers/__init__.py
diff --git a/tests/providers/apache/kafka/triggers/test_await_message.py b/providers/tests/apache/kafka/triggers/test_await_message.py
similarity index 96%
rename from tests/providers/apache/kafka/triggers/test_await_message.py
rename to providers/tests/apache/kafka/triggers/test_await_message.py
index db17d5e4e758c..0f2b3f6f6b074 100644
--- a/tests/providers/apache/kafka/triggers/test_await_message.py
+++ b/providers/tests/apache/kafka/triggers/test_await_message.py
@@ -100,7 +100,7 @@ async def test_trigger_run_good(self, mocker):
 
         trigger = AwaitMessageTrigger(
             kafka_config_id="kafka_d",
-            apply_function="tests.providers.apache.kafka.triggers.test_await_message.apply_function_true",
+            apply_function="providers.tests.apache.kafka.triggers.test_await_message.apply_function_true",
             topics=["noop"],
             poll_timeout=0.0001,
             poll_interval=5,
@@ -117,7 +117,7 @@ async def test_trigger_run_bad(self, mocker):
 
         trigger = AwaitMessageTrigger(
             kafka_config_id="kafka_d",
-            apply_function="tests.providers.apache.kafka.triggers.test_await_message.apply_function_false",
+            apply_function="providers.tests.apache.kafka.triggers.test_await_message.apply_function_false",
             topics=["noop"],
             poll_timeout=0.0001,
             poll_interval=5,
diff --git a/tests/providers/apache/hive/operators/__init__.py b/providers/tests/apache/kylin/__init__.py
similarity index 100%
rename from tests/providers/apache/hive/operators/__init__.py
rename to providers/tests/apache/kylin/__init__.py
diff --git a/tests/providers/apache/hive/sensors/__init__.py b/providers/tests/apache/kylin/hooks/__init__.py
similarity index 100%
rename from tests/providers/apache/hive/sensors/__init__.py
rename to providers/tests/apache/kylin/hooks/__init__.py
diff --git a/tests/providers/apache/kylin/hooks/test_kylin.py b/providers/tests/apache/kylin/hooks/test_kylin.py
similarity index 100%
rename from tests/providers/apache/kylin/hooks/test_kylin.py
rename to providers/tests/apache/kylin/hooks/test_kylin.py
diff --git a/tests/providers/apache/kylin/__init__.py b/providers/tests/apache/kylin/operators/__init__.py
similarity index 100%
rename from tests/providers/apache/kylin/__init__.py
rename to providers/tests/apache/kylin/operators/__init__.py
diff --git a/tests/providers/apache/kylin/operators/test_kylin_cube.py b/providers/tests/apache/kylin/operators/test_kylin_cube.py
similarity index 100%
rename from tests/providers/apache/kylin/operators/test_kylin_cube.py
rename to providers/tests/apache/kylin/operators/test_kylin_cube.py
diff --git a/tests/providers/amazon/aws/executors/batch/__init__.py b/providers/tests/apache/livy/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/executors/batch/__init__.py
rename to providers/tests/apache/livy/__init__.py
diff --git a/tests/providers/amazon/aws/executors/ecs/__init__.py b/providers/tests/apache/livy/hooks/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/executors/ecs/__init__.py
rename to providers/tests/apache/livy/hooks/__init__.py
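The kafka hunks above change only string literals: apply_function and producer_function take a dotted path that is resolved to a callable at runtime, so the module move has to be reflected in the literal itself. A rough standalone sketch of how such a path is resolved (Airflow ships its own helper, airflow.utils.module_loading.import_string; the function below is illustrative only):

    from importlib import import_module

    def resolve_dotted_path(dotted_path: str):
        # Split "package.module.attribute" into module and attribute,
        # import the module, and return the named attribute.
        module_path, _, attr_name = dotted_path.rpartition(".")
        return getattr(import_module(module_path), attr_name)

    # e.g. resolve_dotted_path("providers.tests.apache.kafka.operators.test_consume._no_op")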
diff --git a/tests/providers/apache/livy/hooks/test_livy.py b/providers/tests/apache/livy/hooks/test_livy.py
similarity index 99%
rename from tests/providers/apache/livy/hooks/test_livy.py
rename to providers/tests/apache/livy/hooks/test_livy.py
index d3c110e68b4b0..0fca347b06a99 100644
--- a/tests/providers/apache/livy/hooks/test_livy.py
+++ b/providers/tests/apache/livy/hooks/test_livy.py
@@ -30,7 +30,8 @@
 from airflow.models import Connection
 from airflow.providers.apache.livy.hooks.livy import BatchState, LivyAsyncHook, LivyHook
 from airflow.utils import db
-from tests.test_utils.db import clear_db_connections
+
+from dev.tests_common.test_utils.db import clear_db_connections
 
 pytestmark = pytest.mark.skip_if_database_isolation_mode
 
diff --git a/tests/providers/amazon/aws/executors/utils/__init__.py b/providers/tests/apache/livy/operators/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/executors/utils/__init__.py
rename to providers/tests/apache/livy/operators/__init__.py
diff --git a/tests/providers/apache/livy/operators/test_livy.py b/providers/tests/apache/livy/operators/test_livy.py
similarity index 100%
rename from tests/providers/apache/livy/operators/test_livy.py
rename to providers/tests/apache/livy/operators/test_livy.py
diff --git a/tests/providers/amazon/aws/fs/__init__.py b/providers/tests/apache/livy/sensors/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/fs/__init__.py
rename to providers/tests/apache/livy/sensors/__init__.py
diff --git a/tests/providers/apache/livy/sensors/test_livy.py b/providers/tests/apache/livy/sensors/test_livy.py
similarity index 100%
rename from tests/providers/apache/livy/sensors/test_livy.py
rename to providers/tests/apache/livy/sensors/test_livy.py
diff --git a/tests/providers/amazon/aws/hooks/__init__.py b/providers/tests/apache/livy/triggers/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/hooks/__init__.py
rename to providers/tests/apache/livy/triggers/__init__.py
diff --git a/tests/providers/apache/livy/triggers/test_livy.py b/providers/tests/apache/livy/triggers/test_livy.py
similarity index 100%
rename from tests/providers/apache/livy/triggers/test_livy.py
rename to providers/tests/apache/livy/triggers/test_livy.py
diff --git a/tests/providers/apache/kylin/hooks/__init__.py b/providers/tests/apache/pig/__init__.py
similarity index 100%
rename from tests/providers/apache/kylin/hooks/__init__.py
rename to providers/tests/apache/pig/__init__.py
diff --git a/tests/providers/apache/kylin/operators/__init__.py b/providers/tests/apache/pig/hooks/__init__.py
similarity index 100%
rename from tests/providers/apache/kylin/operators/__init__.py
rename to providers/tests/apache/pig/hooks/__init__.py
diff --git a/tests/providers/apache/pig/hooks/test_pig.py b/providers/tests/apache/pig/hooks/test_pig.py
similarity index 100%
rename from tests/providers/apache/pig/hooks/test_pig.py
rename to providers/tests/apache/pig/hooks/test_pig.py
diff --git a/tests/providers/amazon/aws/infrastructure/__init__.py b/providers/tests/apache/pig/operators/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/infrastructure/__init__.py
rename to providers/tests/apache/pig/operators/__init__.py
diff --git a/tests/providers/apache/pig/operators/test_pig.py b/providers/tests/apache/pig/operators/test_pig.py
similarity index 100%
rename from tests/providers/apache/pig/operators/test_pig.py
rename to providers/tests/apache/pig/operators/test_pig.py
diff --git a/tests/providers/apache/pig/__init__.py b/providers/tests/apache/pinot/__init__.py
similarity index 100%
rename from tests/providers/apache/pig/__init__.py
rename to providers/tests/apache/pinot/__init__.py
diff --git a/tests/providers/apache/pig/hooks/__init__.py b/providers/tests/apache/pinot/hooks/__init__.py
similarity index 100%
rename from tests/providers/apache/pig/hooks/__init__.py
rename to providers/tests/apache/pinot/hooks/__init__.py
diff --git a/tests/providers/apache/pinot/hooks/test_pinot.py b/providers/tests/apache/pinot/hooks/test_pinot.py
similarity index 100%
rename from tests/providers/apache/pinot/hooks/test_pinot.py
rename to providers/tests/apache/pinot/hooks/test_pinot.py
diff --git a/tests/providers/apache/pinot/__init__.py b/providers/tests/apache/spark/__init__.py
similarity index 100%
rename from tests/providers/apache/pinot/__init__.py
rename to providers/tests/apache/spark/__init__.py
diff --git a/tests/providers/amazon/aws/infrastructure/example_s3_to_redshift/__init__.py b/providers/tests/apache/spark/decorators/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/infrastructure/example_s3_to_redshift/__init__.py
rename to providers/tests/apache/spark/decorators/__init__.py
diff --git a/tests/providers/apache/spark/decorators/test_pyspark.py b/providers/tests/apache/spark/decorators/test_pyspark.py
similarity index 100%
rename from tests/providers/apache/spark/decorators/test_pyspark.py
rename to providers/tests/apache/spark/decorators/test_pyspark.py
diff --git a/tests/providers/apache/pinot/hooks/__init__.py b/providers/tests/apache/spark/hooks/__init__.py
similarity index 100%
rename from tests/providers/apache/pinot/hooks/__init__.py
rename to providers/tests/apache/spark/hooks/__init__.py
diff --git a/tests/providers/apache/spark/hooks/test_spark_connect.py b/providers/tests/apache/spark/hooks/test_spark_connect.py
similarity index 100%
rename from tests/providers/apache/spark/hooks/test_spark_connect.py
rename to providers/tests/apache/spark/hooks/test_spark_connect.py
diff --git a/tests/providers/apache/spark/hooks/test_spark_jdbc.py b/providers/tests/apache/spark/hooks/test_spark_jdbc.py
similarity index 100%
rename from tests/providers/apache/spark/hooks/test_spark_jdbc.py
rename to providers/tests/apache/spark/hooks/test_spark_jdbc.py
diff --git a/tests/providers/apache/spark/hooks/test_spark_jdbc_script.py b/providers/tests/apache/spark/hooks/test_spark_jdbc_script.py
similarity index 100%
rename from tests/providers/apache/spark/hooks/test_spark_jdbc_script.py
rename to providers/tests/apache/spark/hooks/test_spark_jdbc_script.py
diff --git a/tests/providers/apache/spark/hooks/test_spark_sql.py b/providers/tests/apache/spark/hooks/test_spark_sql.py
similarity index 99%
rename from tests/providers/apache/spark/hooks/test_spark_sql.py
rename to providers/tests/apache/spark/hooks/test_spark_sql.py
index 9a33bfc2923d9..10f63cce99326 100644
--- a/tests/providers/apache/spark/hooks/test_spark_sql.py
+++ b/providers/tests/apache/spark/hooks/test_spark_sql.py
@@ -27,7 +27,8 @@
 from airflow.models import Connection
 from airflow.providers.apache.spark.hooks.spark_sql import SparkSqlHook
 from airflow.utils import db
-from tests.test_utils.db import clear_db_connections
+
+from dev.tests_common.test_utils.db import clear_db_connections
 
 pytestmark = pytest.mark.db_test
 
diff --git a/tests/providers/apache/spark/hooks/test_spark_submit.py b/providers/tests/apache/spark/hooks/test_spark_submit.py
similarity index 100%
rename from tests/providers/apache/spark/hooks/test_spark_submit.py
rename to providers/tests/apache/spark/hooks/test_spark_submit.py
diff --git a/tests/providers/apache/spark/__init__.py b/providers/tests/apache/spark/operators/__init__.py
similarity index 100%
rename from tests/providers/apache/spark/__init__.py
rename to providers/tests/apache/spark/operators/__init__.py
diff --git a/tests/providers/apache/spark/operators/test_spark_jdbc.py b/providers/tests/apache/spark/operators/test_spark_jdbc.py
similarity index 100%
rename from tests/providers/apache/spark/operators/test_spark_jdbc.py
rename to providers/tests/apache/spark/operators/test_spark_jdbc.py
diff --git a/tests/providers/apache/spark/operators/test_spark_sql.py b/providers/tests/apache/spark/operators/test_spark_sql.py
similarity index 100%
rename from tests/providers/apache/spark/operators/test_spark_sql.py
rename to providers/tests/apache/spark/operators/test_spark_sql.py
diff --git a/tests/providers/apache/spark/operators/test_spark_submit.py b/providers/tests/apache/spark/operators/test_spark_submit.py
similarity index 100%
rename from tests/providers/apache/spark/operators/test_spark_submit.py
rename to providers/tests/apache/spark/operators/test_spark_submit.py
diff --git a/tests/providers/amazon/aws/links/__init__.py b/providers/tests/apprise/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/links/__init__.py
rename to providers/tests/apprise/__init__.py
diff --git a/tests/providers/apache/spark/hooks/__init__.py b/providers/tests/apprise/hooks/__init__.py
similarity index 100%
rename from tests/providers/apache/spark/hooks/__init__.py
rename to providers/tests/apprise/hooks/__init__.py
diff --git a/tests/providers/apprise/hooks/test_apprise.py b/providers/tests/apprise/hooks/test_apprise.py
similarity index 100%
rename from tests/providers/apprise/hooks/test_apprise.py
rename to providers/tests/apprise/hooks/test_apprise.py
diff --git a/tests/providers/apache/spark/operators/__init__.py b/providers/tests/apprise/notifications/__init__.py
similarity index 100%
rename from tests/providers/apache/spark/operators/__init__.py
rename to providers/tests/apprise/notifications/__init__.py
diff --git a/tests/providers/apprise/notifications/test_apprise.py b/providers/tests/apprise/notifications/test_apprise.py
similarity index 100%
rename from tests/providers/apprise/notifications/test_apprise.py
rename to providers/tests/apprise/notifications/test_apprise.py
diff --git a/tests/providers/amazon/aws/log/__init__.py b/providers/tests/arangodb/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/log/__init__.py
rename to providers/tests/arangodb/__init__.py
diff --git a/tests/providers/amazon/aws/notifications/__init__.py b/providers/tests/arangodb/hooks/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/notifications/__init__.py
rename to providers/tests/arangodb/hooks/__init__.py
diff --git a/tests/providers/arangodb/hooks/test_arangodb.py b/providers/tests/arangodb/hooks/test_arangodb.py
similarity index 100%
rename from tests/providers/arangodb/hooks/test_arangodb.py
rename to providers/tests/arangodb/hooks/test_arangodb.py
diff --git a/tests/providers/amazon/aws/operators/__init__.py b/providers/tests/arangodb/operators/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/operators/__init__.py
rename to providers/tests/arangodb/operators/__init__.py
diff --git a/tests/providers/arangodb/operators/test_arangodb.py b/providers/tests/arangodb/operators/test_arangodb.py
similarity index 100%
rename from tests/providers/arangodb/operators/test_arangodb.py
rename to providers/tests/arangodb/operators/test_arangodb.py
diff --git a/tests/providers/amazon/aws/secrets/__init__.py b/providers/tests/arangodb/sensors/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/secrets/__init__.py
rename to providers/tests/arangodb/sensors/__init__.py
diff --git a/tests/providers/arangodb/sensors/test_arangodb.py b/providers/tests/arangodb/sensors/test_arangodb.py
similarity index 100%
rename from tests/providers/arangodb/sensors/test_arangodb.py
rename to providers/tests/arangodb/sensors/test_arangodb.py
diff --git a/tests/providers/apprise/hooks/__init__.py b/providers/tests/asana/__init__.py
similarity index 100%
rename from tests/providers/apprise/hooks/__init__.py
rename to providers/tests/asana/__init__.py
diff --git a/tests/providers/apprise/notifications/__init__.py b/providers/tests/asana/hooks/__init__.py
similarity index 100%
rename from tests/providers/apprise/notifications/__init__.py
rename to providers/tests/asana/hooks/__init__.py
diff --git a/tests/providers/asana/hooks/test_asana.py b/providers/tests/asana/hooks/test_asana.py
similarity index 100%
rename from tests/providers/asana/hooks/test_asana.py
rename to providers/tests/asana/hooks/test_asana.py
diff --git a/tests/providers/asana/__init__.py b/providers/tests/asana/operators/__init__.py
similarity index 100%
rename from tests/providers/asana/__init__.py
rename to providers/tests/asana/operators/__init__.py
diff --git a/tests/providers/asana/operators/test_asana_tasks.py b/providers/tests/asana/operators/test_asana_tasks.py
similarity index 100%
rename from tests/providers/asana/operators/test_asana_tasks.py
rename to providers/tests/asana/operators/test_asana_tasks.py
diff --git a/tests/providers/amazon/aws/sensors/__init__.py b/providers/tests/atlassian/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/sensors/__init__.py
rename to providers/tests/atlassian/__init__.py
diff --git a/tests/providers/amazon/aws/system/__init__.py b/providers/tests/atlassian/jira/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/system/__init__.py
rename to providers/tests/atlassian/jira/__init__.py
diff --git a/tests/providers/amazon/aws/system/utils/__init__.py b/providers/tests/atlassian/jira/hooks/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/system/utils/__init__.py
rename to providers/tests/atlassian/jira/hooks/__init__.py
diff --git a/tests/providers/atlassian/jira/hooks/test_jira.py b/providers/tests/atlassian/jira/hooks/test_jira.py
similarity index 98%
rename from tests/providers/atlassian/jira/hooks/test_jira.py
rename to providers/tests/atlassian/jira/hooks/test_jira.py
index 339274e317c52..e2cf9389471e7 100644
--- a/tests/providers/atlassian/jira/hooks/test_jira.py
+++ b/providers/tests/atlassian/jira/hooks/test_jira.py
@@ -24,7 +24,8 @@
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.models import Connection
 from airflow.providers.atlassian.jira.hooks.jira import JiraHook
-from tests.test_utils.compat import connection_as_json
+
+from dev.tests_common.test_utils.compat import connection_as_json
 
 
 @pytest.fixture
diff --git a/tests/providers/amazon/aws/transfers/__init__.py b/providers/tests/atlassian/jira/notifications/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/transfers/__init__.py
rename to providers/tests/atlassian/jira/notifications/__init__.py
diff --git a/tests/providers/atlassian/jira/notifications/test_jira.py b/providers/tests/atlassian/jira/notifications/test_jira.py
similarity index 100%
rename from tests/providers/atlassian/jira/notifications/test_jira.py
rename to providers/tests/atlassian/jira/notifications/test_jira.py
diff --git a/tests/providers/amazon/aws/triggers/__init__.py b/providers/tests/atlassian/jira/operators/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/triggers/__init__.py
rename to providers/tests/atlassian/jira/operators/__init__.py
diff --git a/tests/providers/atlassian/jira/operators/test_jira.py b/providers/tests/atlassian/jira/operators/test_jira.py
similarity index 98%
rename from tests/providers/atlassian/jira/operators/test_jira.py
rename to providers/tests/atlassian/jira/operators/test_jira.py
index 9282089bae567..614c1a4939a90 100644
--- a/tests/providers/atlassian/jira/operators/test_jira.py
+++ b/providers/tests/atlassian/jira/operators/test_jira.py
@@ -24,7 +24,8 @@
 from airflow.models import Connection
 from airflow.providers.atlassian.jira.operators.jira import JiraOperator
 from airflow.utils import timezone
-from tests.test_utils.compat import connection_as_json
+
+from dev.tests_common.test_utils.compat import connection_as_json
 
 DEFAULT_DATE = timezone.datetime(2017, 1, 1)
 MINIMAL_TEST_TICKET = {
diff --git a/tests/providers/amazon/aws/utils/__init__.py b/providers/tests/atlassian/jira/sensors/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/utils/__init__.py
rename to providers/tests/atlassian/jira/sensors/__init__.py
diff --git a/tests/providers/atlassian/jira/sensors/test_jira.py b/providers/tests/atlassian/jira/sensors/test_jira.py
similarity index 97%
rename from tests/providers/atlassian/jira/sensors/test_jira.py
rename to providers/tests/atlassian/jira/sensors/test_jira.py
index 5e2ce896b6c67..9cb2572969ad4 100644
--- a/tests/providers/atlassian/jira/sensors/test_jira.py
+++ b/providers/tests/atlassian/jira/sensors/test_jira.py
@@ -24,7 +24,8 @@
 from airflow.models import Connection
 from airflow.providers.atlassian.jira.sensors.jira import JiraTicketSensor
 from airflow.utils import timezone
-from tests.test_utils.compat import connection_as_json
+
+from dev.tests_common.test_utils.compat import connection_as_json
 
 DEFAULT_DATE = timezone.datetime(2017, 1, 1)
 MINIMAL_TEST_TICKET = {
diff --git a/tests/providers/asana/hooks/__init__.py b/providers/tests/celery/__init__.py
similarity index 100%
rename from tests/providers/asana/hooks/__init__.py
rename to providers/tests/celery/__init__.py
diff --git a/tests/providers/amazon/aws/waiters/__init__.py b/providers/tests/celery/cli/__init__.py
similarity index 100%
rename from tests/providers/amazon/aws/waiters/__init__.py
rename to providers/tests/celery/cli/__init__.py
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/providers/asana/operators/__init__.py b/providers/tests/celery/executors/__init__.py similarity index 100% rename from tests/providers/asana/operators/__init__.py rename to providers/tests/celery/executors/__init__.py diff --git a/tests/providers/celery/executors/test_celery_executor.py b/providers/tests/celery/executors/test_celery_executor.py similarity index 96% rename from tests/providers/celery/executors/test_celery_executor.py rename to providers/tests/celery/executors/test_celery_executor.py index 63ca4a47d255e..261a11f837e42 100644 --- a/tests/providers/celery/executors/test_celery_executor.py +++ b/providers/tests/celery/executors/test_celery_executor.py @@ -41,9 +41,10 @@ from airflow.providers.celery.executors.celery_executor import CeleryExecutor from airflow.utils import timezone from airflow.utils.state import State -from tests.test_utils import db -from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils import db +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test @@ -165,11 +166,15 @@ def test_command_validation(self, command, raise_exception): ValueError, match=r'The command must start with \["airflow", "tasks", "run"\]\.' ) - with mock.patch( - "airflow.providers.celery.executors.celery_executor_utils._execute_in_subprocess" - ) as mock_subproc, mock.patch( - "airflow.providers.celery.executors.celery_executor_utils._execute_in_fork" - ) as mock_fork, mock.patch("celery.app.task.Task.request") as mock_task: + with ( + mock.patch( + "airflow.providers.celery.executors.celery_executor_utils._execute_in_subprocess" + ) as mock_subproc, + mock.patch( + "airflow.providers.celery.executors.celery_executor_utils._execute_in_fork" + ) as mock_fork, + mock.patch("celery.app.task.Task.request") as mock_task, + ): mock_task.id = "abcdef-124215-abcdef" with expected_context: celery_executor_utils.execute_command(command) diff --git a/tests/providers/celery/executors/test_celery_kubernetes_executor.py b/providers/tests/celery/executors/test_celery_kubernetes_executor.py similarity index 100% rename from tests/providers/celery/executors/test_celery_kubernetes_executor.py rename to providers/tests/celery/executors/test_celery_kubernetes_executor.py diff --git a/tests/providers/apache/__init__.py b/providers/tests/celery/log_handlers/__init__.py similarity index 100% rename from tests/providers/apache/__init__.py rename to providers/tests/celery/log_handlers/__init__.py diff --git a/tests/providers/celery/log_handlers/test_log_handlers.py b/providers/tests/celery/log_handlers/test_log_handlers.py similarity index 96% rename from tests/providers/celery/log_handlers/test_log_handlers.py rename to providers/tests/celery/log_handlers/test_log_handlers.py index 5e93b076e41ea..9eb9e33e2ae0d 100644 --- a/tests/providers/celery/log_handlers/test_log_handlers.py +++ b/providers/tests/celery/log_handlers/test_log_handlers.py @@ -35,8 +35,9 @@ from airflow.utils.state import TaskInstanceState from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.compat import 
AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars if AIRFLOW_V_3_0_PLUS: pass diff --git a/tests/providers/celery/__init__.py b/providers/tests/celery/sensors/__init__.py similarity index 100% rename from tests/providers/celery/__init__.py rename to providers/tests/celery/sensors/__init__.py diff --git a/tests/providers/celery/sensors/test_celery_queue.py b/providers/tests/celery/sensors/test_celery_queue.py similarity index 100% rename from tests/providers/celery/sensors/test_celery_queue.py rename to providers/tests/celery/sensors/test_celery_queue.py diff --git a/tests/providers/celery/executors/__init__.py b/providers/tests/cloudant/__init__.py similarity index 100% rename from tests/providers/celery/executors/__init__.py rename to providers/tests/cloudant/__init__.py diff --git a/tests/providers/celery/sensors/__init__.py b/providers/tests/cloudant/hooks/__init__.py similarity index 100% rename from tests/providers/celery/sensors/__init__.py rename to providers/tests/cloudant/hooks/__init__.py diff --git a/tests/providers/cloudant/hooks/test_cloudant.py b/providers/tests/cloudant/hooks/test_cloudant.py similarity index 100% rename from tests/providers/cloudant/hooks/test_cloudant.py rename to providers/tests/cloudant/hooks/test_cloudant.py diff --git a/tests/providers/apache/beam/__init__.py b/providers/tests/cncf/__init__.py similarity index 100% rename from tests/providers/apache/beam/__init__.py rename to providers/tests/cncf/__init__.py diff --git a/tests/providers/cloudant/__init__.py b/providers/tests/cncf/kubernetes/__init__.py similarity index 100% rename from tests/providers/cloudant/__init__.py rename to providers/tests/cncf/kubernetes/__init__.py diff --git a/tests/providers/apache/beam/hooks/__init__.py b/providers/tests/cncf/kubernetes/cli/__init__.py similarity index 100% rename from tests/providers/apache/beam/hooks/__init__.py rename to providers/tests/cncf/kubernetes/cli/__init__.py diff --git a/tests/providers/cncf/kubernetes/cli/test_kubernetes_command.py b/providers/tests/cncf/kubernetes/cli/test_kubernetes_command.py similarity index 99% rename from tests/providers/cncf/kubernetes/cli/test_kubernetes_command.py rename to providers/tests/cncf/kubernetes/cli/test_kubernetes_command.py index 790801a3bbcf9..62019a8d2fe90 100644 --- a/tests/providers/cncf/kubernetes/cli/test_kubernetes_command.py +++ b/providers/tests/cncf/kubernetes/cli/test_kubernetes_command.py @@ -28,7 +28,8 @@ from airflow.cli import cli_parser from airflow.executors import executor_loader from airflow.providers.cncf.kubernetes.cli import kubernetes_command -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/providers/cncf/kubernetes/conftest.py b/providers/tests/cncf/kubernetes/conftest.py similarity index 100% rename from tests/providers/cncf/kubernetes/conftest.py rename to providers/tests/cncf/kubernetes/conftest.py diff --git a/tests/providers/apache/beam/operators/__init__.py b/providers/tests/cncf/kubernetes/data_files/__init__.py similarity index 100% rename from tests/providers/apache/beam/operators/__init__.py rename to providers/tests/cncf/kubernetes/data_files/__init__.py diff --git a/tests/providers/apache/beam/triggers/__init__.py b/providers/tests/cncf/kubernetes/data_files/executor/__init__.py similarity index 100% rename from tests/providers/apache/beam/triggers/__init__.py rename to providers/tests/cncf/kubernetes/data_files/executor/__init__.py 
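Note: besides the mechanical `tests.test_utils` -> `dev.tests_common.test_utils` import moves, many hunks in this series (test_celery_executor.py above, and the kubernetes, cohere, databricks and dbt hunks below) rewrite comma-chained context managers into the parenthesized form, which is officially documented from Python 3.10 (and already accepted by CPython 3.9's PEG parser). A minimal sketch of the before/after pattern, using hypothetical mock targets rather than the Airflow internals patched in the real hunks:

    from unittest import mock

    # Before: both patches chained on one logical line of a single
    # `with` statement, which forces awkward wrapping once the
    # dotted patch targets get long.
    with mock.patch("os.getcwd") as mock_cwd, mock.patch("os.getpid") as mock_pid:
        mock_cwd.return_value = "/tmp"
        mock_pid.return_value = 4242

    # After: the same two patches grouped in parentheses, one
    # context manager per line, as in the hunks in this series.
    with (
        mock.patch("os.getcwd") as mock_cwd,
        mock.patch("os.getpid") as mock_pid,
    ):
        mock_cwd.return_value = "/tmp"
        mock_pid.return_value = 4242

Both forms are equivalent at runtime; the parenthesized layout simply lets formatters put one manager per line without backslash continuations.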
diff --git a/tests/providers/cncf/kubernetes/data_files/executor/basic_template.yaml b/providers/tests/cncf/kubernetes/data_files/executor/basic_template.yaml similarity index 100% rename from tests/providers/cncf/kubernetes/data_files/executor/basic_template.yaml rename to providers/tests/cncf/kubernetes/data_files/executor/basic_template.yaml diff --git a/tests/providers/cncf/kubernetes/data_files/kube_config b/providers/tests/cncf/kubernetes/data_files/kube_config similarity index 100% rename from tests/providers/cncf/kubernetes/data_files/kube_config rename to providers/tests/cncf/kubernetes/data_files/kube_config diff --git a/tests/providers/apache/cassandra/__init__.py b/providers/tests/cncf/kubernetes/data_files/pods/__init__.py similarity index 100% rename from tests/providers/apache/cassandra/__init__.py rename to providers/tests/cncf/kubernetes/data_files/pods/__init__.py diff --git a/tests/providers/cncf/kubernetes/data_files/pods/generator_base.yaml b/providers/tests/cncf/kubernetes/data_files/pods/generator_base.yaml similarity index 100% rename from tests/providers/cncf/kubernetes/data_files/pods/generator_base.yaml rename to providers/tests/cncf/kubernetes/data_files/pods/generator_base.yaml diff --git a/tests/providers/cncf/kubernetes/data_files/pods/generator_base_with_secrets.yaml b/providers/tests/cncf/kubernetes/data_files/pods/generator_base_with_secrets.yaml similarity index 100% rename from tests/providers/cncf/kubernetes/data_files/pods/generator_base_with_secrets.yaml rename to providers/tests/cncf/kubernetes/data_files/pods/generator_base_with_secrets.yaml diff --git a/tests/providers/cncf/kubernetes/data_files/pods/template.yaml b/providers/tests/cncf/kubernetes/data_files/pods/template.yaml similarity index 100% rename from tests/providers/cncf/kubernetes/data_files/pods/template.yaml rename to providers/tests/cncf/kubernetes/data_files/pods/template.yaml diff --git a/tests/providers/apache/cassandra/sensors/__init__.py b/providers/tests/cncf/kubernetes/data_files/spark/__init__.py similarity index 100% rename from tests/providers/apache/cassandra/sensors/__init__.py rename to providers/tests/cncf/kubernetes/data_files/spark/__init__.py diff --git a/tests/providers/cncf/kubernetes/data_files/spark/application_template.yaml b/providers/tests/cncf/kubernetes/data_files/spark/application_template.yaml similarity index 100% rename from tests/providers/cncf/kubernetes/data_files/spark/application_template.yaml rename to providers/tests/cncf/kubernetes/data_files/spark/application_template.yaml diff --git a/tests/providers/cncf/kubernetes/data_files/spark/application_test.json b/providers/tests/cncf/kubernetes/data_files/spark/application_test.json similarity index 100% rename from tests/providers/cncf/kubernetes/data_files/spark/application_test.json rename to providers/tests/cncf/kubernetes/data_files/spark/application_test.json diff --git a/tests/providers/cncf/kubernetes/data_files/spark/application_test.yaml b/providers/tests/cncf/kubernetes/data_files/spark/application_test.yaml similarity index 100% rename from tests/providers/cncf/kubernetes/data_files/spark/application_test.yaml rename to providers/tests/cncf/kubernetes/data_files/spark/application_test.yaml diff --git a/tests/providers/cncf/kubernetes/data_files/spark/application_test_with_no_name_from_config.json b/providers/tests/cncf/kubernetes/data_files/spark/application_test_with_no_name_from_config.json similarity index 100% rename from 
tests/providers/cncf/kubernetes/data_files/spark/application_test_with_no_name_from_config.json rename to providers/tests/cncf/kubernetes/data_files/spark/application_test_with_no_name_from_config.json diff --git a/tests/providers/cncf/kubernetes/data_files/spark/application_test_with_no_name_from_config.yaml b/providers/tests/cncf/kubernetes/data_files/spark/application_test_with_no_name_from_config.yaml similarity index 100% rename from tests/providers/cncf/kubernetes/data_files/spark/application_test_with_no_name_from_config.yaml rename to providers/tests/cncf/kubernetes/data_files/spark/application_test_with_no_name_from_config.yaml diff --git a/tests/providers/apache/druid/transfers/__init__.py b/providers/tests/cncf/kubernetes/decorators/__init__.py similarity index 100% rename from tests/providers/apache/druid/transfers/__init__.py rename to providers/tests/cncf/kubernetes/decorators/__init__.py diff --git a/tests/providers/cncf/kubernetes/decorators/test_kubernetes.py b/providers/tests/cncf/kubernetes/decorators/test_kubernetes.py similarity index 100% rename from tests/providers/cncf/kubernetes/decorators/test_kubernetes.py rename to providers/tests/cncf/kubernetes/decorators/test_kubernetes.py diff --git a/tests/providers/apache/hive/transfers/__init__.py b/providers/tests/cncf/kubernetes/executors/__init__.py similarity index 100% rename from tests/providers/apache/hive/transfers/__init__.py rename to providers/tests/cncf/kubernetes/executors/__init__.py diff --git a/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py b/providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py similarity index 99% rename from tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py rename to providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py index 12435426dd899..5240bf0faecb2 100644 --- a/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py +++ b/providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py @@ -54,8 +54,9 @@ from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator from airflow.utils import timezone from airflow.utils.state import State, TaskInstanceState -from tests.test_utils.compat import BashOperator -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.compat import BashOperator +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.skip_if_database_isolation_mode @@ -1608,12 +1609,15 @@ def setup_method(self): self.events = [] def _run(self): - with mock.patch( - "airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.watch" - ) as mock_watch, mock.patch.object( - KubernetesJobWatcher, - "_pod_events", - ) as mock_pod_events: + with ( + mock.patch( + "airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.watch" + ) as mock_watch, + mock.patch.object( + KubernetesJobWatcher, + "_pod_events", + ) as mock_pod_events, + ): mock_watch.Watch.return_value.stream.return_value = self.events mock_pod_events.return_value = self.events latest_resource_version = self.watcher._run( diff --git a/tests/providers/cncf/kubernetes/executors/test_local_kubernetes_executor.py b/providers/tests/cncf/kubernetes/executors/test_local_kubernetes_executor.py similarity index 100% rename from tests/providers/cncf/kubernetes/executors/test_local_kubernetes_executor.py rename to providers/tests/cncf/kubernetes/executors/test_local_kubernetes_executor.py diff --git a/tests/providers/apache/iceberg/__init__.py 
b/providers/tests/cncf/kubernetes/hooks/__init__.py similarity index 100% rename from tests/providers/apache/iceberg/__init__.py rename to providers/tests/cncf/kubernetes/hooks/__init__.py diff --git a/tests/providers/cncf/kubernetes/hooks/test_kubernetes.py b/providers/tests/cncf/kubernetes/hooks/test_kubernetes.py similarity index 99% rename from tests/providers/cncf/kubernetes/hooks/test_kubernetes.py rename to providers/tests/cncf/kubernetes/hooks/test_kubernetes.py index 065768def24ea..a6847e9c2fe56 100644 --- a/tests/providers/cncf/kubernetes/hooks/test_kubernetes.py +++ b/providers/tests/cncf/kubernetes/hooks/test_kubernetes.py @@ -36,8 +36,9 @@ from airflow.providers.cncf.kubernetes.hooks.kubernetes import AsyncKubernetesHook, KubernetesHook from airflow.utils import db from airflow.utils.db import merge_conn -from tests.test_utils.db import clear_db_connections -from tests.test_utils.providers import get_provider_min_airflow_version + +from dev.tests_common.test_utils.db import clear_db_connections +from dev.tests_common.test_utils.providers import get_provider_min_airflow_version pytestmark = pytest.mark.db_test @@ -677,8 +678,9 @@ class TestKubernetesHookIncorrectConfiguration: ) def test_should_raise_exception_on_invalid_configuration(self, conn_uri): kubernetes_hook = KubernetesHook() - with mock.patch.dict("os.environ", AIRFLOW_CONN_KUBERNETES_DEFAULT=conn_uri), pytest.raises( - AirflowException, match="Invalid connection configuration" + with ( + mock.patch.dict("os.environ", AIRFLOW_CONN_KUBERNETES_DEFAULT=conn_uri), + pytest.raises(AirflowException, match="Invalid connection configuration"), ): kubernetes_hook.get_conn() diff --git a/tests/providers/apache/iceberg/hooks/__init__.py b/providers/tests/cncf/kubernetes/log_handlers/__init__.py similarity index 100% rename from tests/providers/apache/iceberg/hooks/__init__.py rename to providers/tests/cncf/kubernetes/log_handlers/__init__.py diff --git a/tests/providers/cncf/kubernetes/log_handlers/test_log_handlers.py b/providers/tests/cncf/kubernetes/log_handlers/test_log_handlers.py similarity index 98% rename from tests/providers/cncf/kubernetes/log_handlers/test_log_handlers.py rename to providers/tests/cncf/kubernetes/log_handlers/test_log_handlers.py index 3f9bc963f0e60..b02f16ac92786 100644 --- a/tests/providers/cncf/kubernetes/log_handlers/test_log_handlers.py +++ b/providers/tests/cncf/kubernetes/log_handlers/test_log_handlers.py @@ -41,8 +41,9 @@ from airflow.utils.state import State, TaskInstanceState from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/providers/cloudant/hooks/__init__.py b/providers/tests/cncf/kubernetes/models/__init__.py similarity index 100% rename from tests/providers/cloudant/hooks/__init__.py rename to providers/tests/cncf/kubernetes/models/__init__.py diff --git a/tests/providers/cncf/kubernetes/models/test_secret.py b/providers/tests/cncf/kubernetes/models/test_secret.py similarity index 100% rename from tests/providers/cncf/kubernetes/models/test_secret.py rename to providers/tests/cncf/kubernetes/models/test_secret.py diff --git a/tests/providers/apache/impala/__init__.py b/providers/tests/cncf/kubernetes/operators/__init__.py 
similarity index 100% rename from tests/providers/apache/impala/__init__.py rename to providers/tests/cncf/kubernetes/operators/__init__.py diff --git a/tests/providers/cncf/kubernetes/operators/test_custom_object_launcher.py b/providers/tests/cncf/kubernetes/operators/test_custom_object_launcher.py similarity index 100% rename from tests/providers/cncf/kubernetes/operators/test_custom_object_launcher.py rename to providers/tests/cncf/kubernetes/operators/test_custom_object_launcher.py diff --git a/tests/providers/cncf/kubernetes/operators/test_job.py b/providers/tests/cncf/kubernetes/operators/test_job.py similarity index 100% rename from tests/providers/cncf/kubernetes/operators/test_job.py rename to providers/tests/cncf/kubernetes/operators/test_job.py diff --git a/tests/providers/cncf/kubernetes/operators/test_pod.py b/providers/tests/cncf/kubernetes/operators/test_pod.py similarity index 99% rename from tests/providers/cncf/kubernetes/operators/test_pod.py rename to providers/tests/cncf/kubernetes/operators/test_pod.py index a4ccb4b44b4bf..ac50641370fed 100644 --- a/tests/providers/cncf/kubernetes/operators/test_pod.py +++ b/providers/tests/cncf/kubernetes/operators/test_pod.py @@ -50,7 +50,8 @@ from airflow.utils import timezone from airflow.utils.session import create_session from airflow.utils.types import DagRunType -from tests.test_utils import db + +from dev.tests_common.test_utils import db pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] @@ -1513,7 +1514,8 @@ def test_get_logs_but_not_for_base_container( @patch(KUB_OP_PATH.format("find_pod")) def test_execute_sync_callbacks(self, find_pod_mock): from airflow.providers.cncf.kubernetes.callbacks import ExecutionMode - from tests.providers.cncf.kubernetes.test_callbacks import ( + + from providers.tests.cncf.kubernetes.test_callbacks import ( MockKubernetesPodOperatorCallback, MockWrapper, ) @@ -1578,7 +1580,8 @@ def test_execute_sync_callbacks(self, find_pod_mock): @patch(HOOK_CLASS, new=MagicMock) def test_execute_async_callbacks(self): from airflow.providers.cncf.kubernetes.callbacks import ExecutionMode - from tests.providers.cncf.kubernetes.test_callbacks import ( + + from providers.tests.cncf.kubernetes.test_callbacks import ( MockKubernetesPodOperatorCallback, MockWrapper, ) diff --git a/tests/providers/cncf/kubernetes/operators/test_resource.py b/providers/tests/cncf/kubernetes/operators/test_resource.py similarity index 100% rename from tests/providers/cncf/kubernetes/operators/test_resource.py rename to providers/tests/cncf/kubernetes/operators/test_resource.py diff --git a/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes.py b/providers/tests/cncf/kubernetes/operators/test_spark_kubernetes.py similarity index 99% rename from tests/providers/cncf/kubernetes/operators/test_spark_kubernetes.py rename to providers/tests/cncf/kubernetes/operators/test_spark_kubernetes.py index 9c8c40de6558d..18bce53688d4f 100644 --- a/tests/providers/cncf/kubernetes/operators/test_spark_kubernetes.py +++ b/providers/tests/cncf/kubernetes/operators/test_spark_kubernetes.py @@ -33,7 +33,8 @@ from airflow.providers.cncf.kubernetes.operators.spark_kubernetes import SparkKubernetesOperator from airflow.utils import db, timezone from airflow.utils.types import DagRunType -from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS @patch("airflow.providers.cncf.kubernetes.operators.spark_kubernetes.KubernetesHook") diff --git 
a/tests/providers/apache/impala/hooks/__init__.py b/providers/tests/cncf/kubernetes/resource_convert/__init__.py similarity index 100% rename from tests/providers/apache/impala/hooks/__init__.py rename to providers/tests/cncf/kubernetes/resource_convert/__init__.py diff --git a/tests/providers/cncf/kubernetes/resource_convert/test_configmap.py b/providers/tests/cncf/kubernetes/resource_convert/test_configmap.py similarity index 100% rename from tests/providers/cncf/kubernetes/resource_convert/test_configmap.py rename to providers/tests/cncf/kubernetes/resource_convert/test_configmap.py diff --git a/tests/providers/cncf/kubernetes/resource_convert/test_env_variable.py b/providers/tests/cncf/kubernetes/resource_convert/test_env_variable.py similarity index 100% rename from tests/providers/cncf/kubernetes/resource_convert/test_env_variable.py rename to providers/tests/cncf/kubernetes/resource_convert/test_env_variable.py diff --git a/tests/providers/cncf/kubernetes/resource_convert/test_secret.py b/providers/tests/cncf/kubernetes/resource_convert/test_secret.py similarity index 100% rename from tests/providers/cncf/kubernetes/resource_convert/test_secret.py rename to providers/tests/cncf/kubernetes/resource_convert/test_secret.py diff --git a/tests/providers/apache/kafka/__init__.py b/providers/tests/cncf/kubernetes/sensors/__init__.py similarity index 100% rename from tests/providers/apache/kafka/__init__.py rename to providers/tests/cncf/kubernetes/sensors/__init__.py diff --git a/tests/providers/cncf/kubernetes/sensors/test_spark_kubernetes.py b/providers/tests/cncf/kubernetes/sensors/test_spark_kubernetes.py similarity index 100% rename from tests/providers/cncf/kubernetes/sensors/test_spark_kubernetes.py rename to providers/tests/cncf/kubernetes/sensors/test_spark_kubernetes.py diff --git a/tests/providers/cncf/kubernetes/test_callbacks.py b/providers/tests/cncf/kubernetes/test_callbacks.py similarity index 100% rename from tests/providers/cncf/kubernetes/test_callbacks.py rename to providers/tests/cncf/kubernetes/test_callbacks.py diff --git a/tests/providers/cncf/kubernetes/test_client.py b/providers/tests/cncf/kubernetes/test_client.py similarity index 98% rename from tests/providers/cncf/kubernetes/test_client.py rename to providers/tests/cncf/kubernetes/test_client.py index 1384068fd2860..269717ffaa0f0 100644 --- a/tests/providers/cncf/kubernetes/test_client.py +++ b/providers/tests/cncf/kubernetes/test_client.py @@ -28,7 +28,8 @@ _enable_tcp_keepalive, get_kube_client, ) -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars class TestClient: diff --git a/tests/providers/cncf/kubernetes/test_kubernetes_helper_functions.py b/providers/tests/cncf/kubernetes/test_kubernetes_helper_functions.py similarity index 100% rename from tests/providers/cncf/kubernetes/test_kubernetes_helper_functions.py rename to providers/tests/cncf/kubernetes/test_kubernetes_helper_functions.py diff --git a/tests/providers/cncf/kubernetes/test_pod_generator.py b/providers/tests/cncf/kubernetes/test_pod_generator.py similarity index 100% rename from tests/providers/cncf/kubernetes/test_pod_generator.py rename to providers/tests/cncf/kubernetes/test_pod_generator.py diff --git a/tests/providers/cncf/kubernetes/test_template_rendering.py b/providers/tests/cncf/kubernetes/test_template_rendering.py similarity index 96% rename from tests/providers/cncf/kubernetes/test_template_rendering.py rename to providers/tests/cncf/kubernetes/test_template_rendering.py index 
4c087d6040e68..180b7a1e2d2ef 100644 --- a/tests/providers/cncf/kubernetes/test_template_rendering.py +++ b/providers/tests/cncf/kubernetes/test_template_rendering.py @@ -22,16 +22,18 @@ import pytest from sqlalchemy.orm import make_transient -from airflow.configuration import TEST_DAGS_FOLDER from airflow.models.renderedtifields import RenderedTaskInstanceFields, RenderedTaskInstanceFields as RTIF from airflow.providers.cncf.kubernetes.template_rendering import get_rendered_k8s_spec, render_k8s_pod_yaml +from airflow.utils import timezone from airflow.utils.session import create_session from airflow.version import version -from tests.models import DEFAULT_DATE -from tests.test_utils.compat import BashOperator + +from dev.tests_common.test_utils.compat import BashOperator pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] +DEFAULT_DATE = timezone.datetime(2021, 9, 9) + @mock.patch.dict(os.environ, {"AIRFLOW_IS_K8S_EXECUTOR_POD": "True"}) @mock.patch("airflow.settings.pod_mutation_hook") @@ -126,7 +128,7 @@ def test_get_k8s_pod_yaml(render_k8s_pod_yaml, redact, dag_maker, session): with dag_maker("test_get_k8s_pod_yaml") as dag: task = BashOperator(task_id="test", bash_command="echo hi") dr = dag_maker.create_dagrun() - dag.fileloc = TEST_DAGS_FOLDER + "/test_get_k8s_pod_yaml.py" + dag.fileloc = "/test_get_k8s_pod_yaml.py" ti = dr.task_instances[0] ti.task = task diff --git a/tests/providers/apache/kafka/hooks/__init__.py b/providers/tests/cncf/kubernetes/triggers/__init__.py similarity index 100% rename from tests/providers/apache/kafka/hooks/__init__.py rename to providers/tests/cncf/kubernetes/triggers/__init__.py diff --git a/tests/providers/cncf/kubernetes/triggers/test_job.py b/providers/tests/cncf/kubernetes/triggers/test_job.py similarity index 100% rename from tests/providers/cncf/kubernetes/triggers/test_job.py rename to providers/tests/cncf/kubernetes/triggers/test_job.py diff --git a/tests/providers/cncf/kubernetes/triggers/test_pod.py b/providers/tests/cncf/kubernetes/triggers/test_pod.py similarity index 100% rename from tests/providers/cncf/kubernetes/triggers/test_pod.py rename to providers/tests/cncf/kubernetes/triggers/test_pod.py diff --git a/tests/providers/apache/kafka/operators/__init__.py b/providers/tests/cncf/kubernetes/utils/__init__.py similarity index 100% rename from tests/providers/apache/kafka/operators/__init__.py rename to providers/tests/cncf/kubernetes/utils/__init__.py diff --git a/tests/providers/cncf/kubernetes/utils/test_k8s_resource_iterator.py b/providers/tests/cncf/kubernetes/utils/test_k8s_resource_iterator.py similarity index 100% rename from tests/providers/cncf/kubernetes/utils/test_k8s_resource_iterator.py rename to providers/tests/cncf/kubernetes/utils/test_k8s_resource_iterator.py diff --git a/tests/providers/cncf/kubernetes/utils/test_pod_manager.py b/providers/tests/cncf/kubernetes/utils/test_pod_manager.py similarity index 99% rename from tests/providers/cncf/kubernetes/utils/test_pod_manager.py rename to providers/tests/cncf/kubernetes/utils/test_pod_manager.py index 73dac5255d620..b577ea969ea3d 100644 --- a/tests/providers/cncf/kubernetes/utils/test_pod_manager.py +++ b/providers/tests/cncf/kubernetes/utils/test_pod_manager.py @@ -40,7 +40,8 @@ container_is_terminated, ) from airflow.utils.timezone import utc -from tests.providers.cncf.kubernetes.test_callbacks import MockKubernetesPodOperatorCallback, MockWrapper + +from providers.tests.cncf.kubernetes.test_callbacks import MockKubernetesPodOperatorCallback, 
MockWrapper if TYPE_CHECKING: from pendulum import DateTime diff --git a/tests/providers/apache/kafka/sensors/__init__.py b/providers/tests/cohere/__init__.py similarity index 100% rename from tests/providers/apache/kafka/sensors/__init__.py rename to providers/tests/cohere/__init__.py diff --git a/tests/providers/apache/kafka/triggers/__init__.py b/providers/tests/cohere/hooks/__init__.py similarity index 100% rename from tests/providers/apache/kafka/triggers/__init__.py rename to providers/tests/cohere/hooks/__init__.py diff --git a/tests/providers/cohere/hooks/test_cohere.py b/providers/tests/cohere/hooks/test_cohere.py similarity index 84% rename from tests/providers/cohere/hooks/test_cohere.py rename to providers/tests/cohere/hooks/test_cohere.py index 8f566ec0c6405..28aef3ebaf795 100644 --- a/tests/providers/cohere/hooks/test_cohere.py +++ b/providers/tests/cohere/hooks/test_cohere.py @@ -34,11 +34,14 @@ def test__get_api_key(self): api_url = "http://some_host.com" timeout = 150 max_retries = 5 - with patch.object( - CohereHook, - "get_connection", - return_value=Connection(conn_type="cohere", password=api_key, host=api_url), - ), patch("cohere.Client") as client: + with ( + patch.object( + CohereHook, + "get_connection", + return_value=Connection(conn_type="cohere", password=api_key, host=api_url), + ), + patch("cohere.Client") as client, + ): hook = CohereHook(timeout=timeout, max_retries=max_retries) _ = hook.get_conn client.assert_called_once_with( diff --git a/tests/providers/apache/livy/__init__.py b/providers/tests/cohere/operators/__init__.py similarity index 100% rename from tests/providers/apache/livy/__init__.py rename to providers/tests/cohere/operators/__init__.py diff --git a/tests/providers/cohere/operators/test_embedding.py b/providers/tests/cohere/operators/test_embedding.py similarity index 100% rename from tests/providers/cohere/operators/test_embedding.py rename to providers/tests/cohere/operators/test_embedding.py diff --git a/tests/providers/apache/livy/hooks/__init__.py b/providers/tests/common/__init__.py similarity index 100% rename from tests/providers/apache/livy/hooks/__init__.py rename to providers/tests/common/__init__.py diff --git a/tests/providers/apache/livy/operators/__init__.py b/providers/tests/common/compat/__init__.py similarity index 100% rename from tests/providers/apache/livy/operators/__init__.py rename to providers/tests/common/compat/__init__.py diff --git a/tests/providers/apache/livy/sensors/__init__.py b/providers/tests/common/compat/lineage/__init__.py similarity index 100% rename from tests/providers/apache/livy/sensors/__init__.py rename to providers/tests/common/compat/lineage/__init__.py diff --git a/tests/providers/common/compat/lineage/test_hook.py b/providers/tests/common/compat/lineage/test_hook.py similarity index 100% rename from tests/providers/common/compat/lineage/test_hook.py rename to providers/tests/common/compat/lineage/test_hook.py diff --git a/tests/providers/apache/livy/triggers/__init__.py b/providers/tests/common/compat/openlineage/__init__.py similarity index 100% rename from tests/providers/apache/livy/triggers/__init__.py rename to providers/tests/common/compat/openlineage/__init__.py diff --git a/tests/providers/common/compat/openlineage/test_facet.py b/providers/tests/common/compat/openlineage/test_facet.py similarity index 100% rename from tests/providers/common/compat/openlineage/test_facet.py rename to providers/tests/common/compat/openlineage/test_facet.py diff --git 
a/tests/providers/apache/pig/operators/__init__.py b/providers/tests/common/compat/openlineage/utils/__init__.py similarity index 100% rename from tests/providers/apache/pig/operators/__init__.py rename to providers/tests/common/compat/openlineage/utils/__init__.py diff --git a/tests/providers/common/compat/openlineage/utils/test_utils.py b/providers/tests/common/compat/openlineage/utils/test_utils.py similarity index 100% rename from tests/providers/common/compat/openlineage/utils/test_utils.py rename to providers/tests/common/compat/openlineage/utils/test_utils.py diff --git a/tests/providers/apache/spark/decorators/__init__.py b/providers/tests/common/compat/security/__init__.py similarity index 100% rename from tests/providers/apache/spark/decorators/__init__.py rename to providers/tests/common/compat/security/__init__.py diff --git a/tests/providers/common/compat/security/test_permissions.py b/providers/tests/common/compat/security/test_permissions.py similarity index 100% rename from tests/providers/common/compat/security/test_permissions.py rename to providers/tests/common/compat/security/test_permissions.py diff --git a/tests/providers/apprise/__init__.py b/providers/tests/common/io/__init__.py similarity index 100% rename from tests/providers/apprise/__init__.py rename to providers/tests/common/io/__init__.py diff --git a/tests/providers/arangodb/__init__.py b/providers/tests/common/io/assets/__init__.py similarity index 100% rename from tests/providers/arangodb/__init__.py rename to providers/tests/common/io/assets/__init__.py diff --git a/tests/providers/common/io/assets/test_file.py b/providers/tests/common/io/assets/test_file.py similarity index 100% rename from tests/providers/common/io/assets/test_file.py rename to providers/tests/common/io/assets/test_file.py diff --git a/tests/providers/arangodb/hooks/__init__.py b/providers/tests/common/io/operators/__init__.py similarity index 100% rename from tests/providers/arangodb/hooks/__init__.py rename to providers/tests/common/io/operators/__init__.py diff --git a/tests/providers/common/io/operators/test_file_transfer.py b/providers/tests/common/io/operators/test_file_transfer.py similarity index 97% rename from tests/providers/common/io/operators/test_file_transfer.py rename to providers/tests/common/io/operators/test_file_transfer.py index 698c33582b82c..55a196fa8918c 100644 --- a/tests/providers/common/io/operators/test_file_transfer.py +++ b/providers/tests/common/io/operators/test_file_transfer.py @@ -20,7 +20,8 @@ from unittest import mock from airflow.providers.common.compat.openlineage.facet import Dataset -from tests.test_utils.compat import ignore_provider_compatibility_error + +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.8.0", __file__): from airflow.providers.common.io.operators.file_transfer import FileTransferOperator diff --git a/tests/providers/arangodb/operators/__init__.py b/providers/tests/common/io/xcom/__init__.py similarity index 100% rename from tests/providers/arangodb/operators/__init__.py rename to providers/tests/common/io/xcom/__init__.py diff --git a/tests/providers/common/io/xcom/test_backend.py b/providers/tests/common/io/xcom/test_backend.py similarity index 96% rename from tests/providers/common/io/xcom/test_backend.py rename to providers/tests/common/io/xcom/test_backend.py index b0516ad487d44..ae45a2f863b78 100644 --- a/tests/providers/common/io/xcom/test_backend.py +++ 
b/providers/tests/common/io/xcom/test_backend.py @@ -19,8 +19,8 @@ import pytest -from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS, ignore_provider_compatibility_error -from tests.www.test_utils import is_db_isolation_mode +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS, ignore_provider_compatibility_error +from dev.tests_common.test_utils.db import is_db_isolation_mode pytestmark = [ pytest.mark.db_test, @@ -37,8 +37,9 @@ from airflow.utils import timezone from airflow.utils.xcom import XCOM_RETURN_KEY -from tests.test_utils import db -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils import db +from dev.tests_common.test_utils.config import conf_vars @pytest.fixture(autouse=True) diff --git a/tests/providers/arangodb/sensors/__init__.py b/providers/tests/common/sql/__init__.py similarity index 100% rename from tests/providers/arangodb/sensors/__init__.py rename to providers/tests/common/sql/__init__.py diff --git a/tests/providers/atlassian/__init__.py b/providers/tests/common/sql/hooks/__init__.py similarity index 100% rename from tests/providers/atlassian/__init__.py rename to providers/tests/common/sql/hooks/__init__.py diff --git a/tests/providers/common/sql/hooks/test_dbapi.py b/providers/tests/common/sql/hooks/test_dbapi.py similarity index 99% rename from tests/providers/common/sql/hooks/test_dbapi.py rename to providers/tests/common/sql/hooks/test_dbapi.py index 6a744e9718da1..5017a5f347c82 100644 --- a/tests/providers/common/sql/hooks/test_dbapi.py +++ b/providers/tests/common/sql/hooks/test_dbapi.py @@ -29,7 +29,8 @@ from airflow.hooks.base import BaseHook from airflow.models import Connection from airflow.providers.common.sql.hooks.sql import DbApiHook, fetch_all_handler, fetch_one_handler -from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ only"), diff --git a/tests/providers/common/sql/hooks/test_sql.py b/providers/tests/common/sql/hooks/test_sql.py similarity index 98% rename from tests/providers/common/sql/hooks/test_sql.py rename to providers/tests/common/sql/hooks/test_sql.py index 9bc246192b77f..87673f6de62e0 100644 --- a/tests/providers/common/sql/hooks/test_sql.py +++ b/providers/tests/common/sql/hooks/test_sql.py @@ -30,8 +30,9 @@ from airflow.models import Connection from airflow.providers.common.sql.hooks.sql import DbApiHook, fetch_all_handler from airflow.utils.session import provide_session -from tests.providers.common.sql.test_utils import mock_hook -from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS +from providers.tests.common.sql.test_utils import mock_hook pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ only"), diff --git a/tests/providers/common/sql/hooks/test_sqlparse.py b/providers/tests/common/sql/hooks/test_sqlparse.py similarity index 96% rename from tests/providers/common/sql/hooks/test_sqlparse.py rename to providers/tests/common/sql/hooks/test_sqlparse.py index 1ce7cb5d2183f..b3622bd8eff21 100644 --- a/tests/providers/common/sql/hooks/test_sqlparse.py +++ b/providers/tests/common/sql/hooks/test_sqlparse.py @@ -19,7 +19,8 @@ import pytest from airflow.providers.common.sql.hooks.sql import DbApiHook -from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS + +from dev.tests_common.test_utils.compat import 
AIRFLOW_V_2_8_PLUS pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ only"), diff --git a/tests/providers/atlassian/jira/__init__.py b/providers/tests/common/sql/operators/__init__.py similarity index 100% rename from tests/providers/atlassian/jira/__init__.py rename to providers/tests/common/sql/operators/__init__.py diff --git a/tests/providers/common/sql/operators/test_sql.py b/providers/tests/common/sql/operators/test_sql.py similarity index 99% rename from tests/providers/common/sql/operators/test_sql.py rename to providers/tests/common/sql/operators/test_sql.py index 5144b05b07e2c..c2e4bc6f2e86f 100644 --- a/tests/providers/common/sql/operators/test_sql.py +++ b/providers/tests/common/sql/operators/test_sql.py @@ -43,7 +43,8 @@ from airflow.utils import timezone from airflow.utils.session import create_session from airflow.utils.state import State -from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS, AIRFLOW_V_3_0_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS, AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/providers/common/sql/operators/test_sql_execute.py b/providers/tests/common/sql/operators/test_sql_execute.py similarity index 99% rename from tests/providers/common/sql/operators/test_sql_execute.py rename to providers/tests/common/sql/operators/test_sql_execute.py index 1527f3190d5ba..bb42cbaba2026 100644 --- a/tests/providers/common/sql/operators/test_sql_execute.py +++ b/providers/tests/common/sql/operators/test_sql_execute.py @@ -33,7 +33,8 @@ from airflow.providers.common.sql.hooks.sql import DbApiHook, fetch_all_handler from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator from airflow.providers.openlineage.extractors.base import OperatorLineage -from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ only"), diff --git a/tests/providers/atlassian/jira/hooks/__init__.py b/providers/tests/common/sql/sensors/__init__.py similarity index 100% rename from tests/providers/atlassian/jira/hooks/__init__.py rename to providers/tests/common/sql/sensors/__init__.py diff --git a/tests/providers/common/sql/sensors/test_sql.py b/providers/tests/common/sql/sensors/test_sql.py similarity index 99% rename from tests/providers/common/sql/sensors/test_sql.py rename to providers/tests/common/sql/sensors/test_sql.py index f4437a265a08c..33e8fe6399738 100644 --- a/tests/providers/common/sql/sensors/test_sql.py +++ b/providers/tests/common/sql/sensors/test_sql.py @@ -26,7 +26,8 @@ from airflow.providers.common.sql.hooks.sql import DbApiHook from airflow.providers.common.sql.sensors.sql import SqlSensor from airflow.utils.timezone import datetime -from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.8.0+ only"), diff --git a/tests/providers/common/sql/test_utils.py b/providers/tests/common/sql/test_utils.py similarity index 96% rename from tests/providers/common/sql/test_utils.py rename to providers/tests/common/sql/test_utils.py index 7c76f3a7fa507..19b7bcc339411 100644 --- a/tests/providers/common/sql/test_utils.py +++ b/providers/tests/common/sql/test_utils.py @@ -23,7 +23,8 @@ import pytest from airflow.models import 
Connection -from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ only"), diff --git a/tests/providers/conftest.py b/providers/tests/conftest.py similarity index 81% rename from tests/providers/conftest.py rename to providers/tests/conftest.py index 7dd0079ae6c7a..6ca7990e6054f 100644 --- a/tests/providers/conftest.py +++ b/providers/tests/conftest.py @@ -17,11 +17,20 @@ from __future__ import annotations +import pathlib from unittest import mock import pytest -from airflow.models import Connection +pytest_plugins = "dev.tests_common.pytest_plugin" + + +@pytest.hookimpl(tryfirst=True) +def pytest_configure(config: pytest.Config) -> None: + dep_path = [pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml")] + config.inicfg["airflow_deprecations_ignore"] = ( + config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] + ) @pytest.fixture @@ -34,6 +43,8 @@ def hook_conn(request): If param is exception than add side effect. Otherwise, it raises an error """ + from airflow.models import Connection + try: conn = request.param except AttributeError: diff --git a/tests/providers/cncf/kubernetes/__init__.py b/providers/tests/databricks/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/__init__.py rename to providers/tests/databricks/__init__.py diff --git a/tests/providers/cncf/kubernetes/models/__init__.py b/providers/tests/databricks/hooks/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/models/__init__.py rename to providers/tests/databricks/hooks/__init__.py diff --git a/tests/providers/databricks/hooks/test_databricks.py b/providers/tests/databricks/hooks/test_databricks.py similarity index 100% rename from tests/providers/databricks/hooks/test_databricks.py rename to providers/tests/databricks/hooks/test_databricks.py diff --git a/tests/providers/databricks/hooks/test_databricks_base.py b/providers/tests/databricks/hooks/test_databricks_base.py similarity index 100% rename from tests/providers/databricks/hooks/test_databricks_base.py rename to providers/tests/databricks/hooks/test_databricks_base.py diff --git a/tests/providers/databricks/hooks/test_databricks_sql.py b/providers/tests/databricks/hooks/test_databricks_sql.py similarity index 98% rename from tests/providers/databricks/hooks/test_databricks_sql.py rename to providers/tests/databricks/hooks/test_databricks_sql.py index a1b318ad00dcd..fc1582db5d90b 100644 --- a/tests/providers/databricks/hooks/test_databricks_sql.py +++ b/providers/tests/databricks/hooks/test_databricks_sql.py @@ -243,9 +243,10 @@ def test_query( hook_descriptions, hook_results, ): - with patch( - "airflow.providers.databricks.hooks.databricks_sql.DatabricksSqlHook.get_conn" - ) as mock_conn, patch("airflow.providers.databricks.hooks.databricks_base.requests") as mock_requests: + with ( + patch("airflow.providers.databricks.hooks.databricks_sql.DatabricksSqlHook.get_conn") as mock_conn, + patch("airflow.providers.databricks.hooks.databricks_base.requests") as mock_requests, + ): mock_requests.codes.ok = 200 mock_requests.get.return_value.json.return_value = { "endpoints": [ diff --git a/tests/providers/databricks/__init__.py b/providers/tests/databricks/operators/__init__.py similarity index 100% rename from tests/providers/databricks/__init__.py rename to 
providers/tests/databricks/operators/__init__.py diff --git a/tests/providers/databricks/operators/test_databricks.py b/providers/tests/databricks/operators/test_databricks.py similarity index 100% rename from tests/providers/databricks/operators/test_databricks.py rename to providers/tests/databricks/operators/test_databricks.py diff --git a/tests/providers/databricks/operators/test_databricks_copy.py b/providers/tests/databricks/operators/test_databricks_copy.py similarity index 100% rename from tests/providers/databricks/operators/test_databricks_copy.py rename to providers/tests/databricks/operators/test_databricks_copy.py diff --git a/tests/providers/databricks/operators/test_databricks_repos.py b/providers/tests/databricks/operators/test_databricks_repos.py similarity index 100% rename from tests/providers/databricks/operators/test_databricks_repos.py rename to providers/tests/databricks/operators/test_databricks_repos.py diff --git a/tests/providers/databricks/operators/test_databricks_sql.py b/providers/tests/databricks/operators/test_databricks_sql.py similarity index 100% rename from tests/providers/databricks/operators/test_databricks_sql.py rename to providers/tests/databricks/operators/test_databricks_sql.py diff --git a/tests/providers/databricks/operators/test_databricks_workflow.py b/providers/tests/databricks/operators/test_databricks_workflow.py similarity index 100% rename from tests/providers/databricks/operators/test_databricks_workflow.py rename to providers/tests/databricks/operators/test_databricks_workflow.py diff --git a/tests/providers/atlassian/jira/notifications/__init__.py b/providers/tests/databricks/plugins/__init__.py similarity index 100% rename from tests/providers/atlassian/jira/notifications/__init__.py rename to providers/tests/databricks/plugins/__init__.py diff --git a/tests/providers/databricks/plugins/test_databricks_workflow.py b/providers/tests/databricks/plugins/test_databricks_workflow.py similarity index 97% rename from tests/providers/databricks/plugins/test_databricks_workflow.py rename to providers/tests/databricks/plugins/test_databricks_workflow.py index c140ac4450b8f..695466c62d266 100644 --- a/tests/providers/databricks/plugins/test_databricks_workflow.py +++ b/providers/tests/databricks/plugins/test_databricks_workflow.py @@ -41,6 +41,8 @@ ) from airflow.www.app import create_app +from dev.tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES + DAG_ID = "test_dag" TASK_ID = "test_task" RUN_ID = "test_run_1" @@ -190,6 +192,9 @@ def test_workflow_job_run_link(app): assert "https://mockhost/#job/1/run/1" in result +@pytest.mark.skipif( + RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES, reason="Web plugin test doesn't work when not against sources" +) @pytest.mark.db_test def test_workflow_job_repair_single_failed_link(app): with app.app_context(): diff --git a/tests/providers/atlassian/jira/operators/__init__.py b/providers/tests/databricks/sensors/__init__.py similarity index 100% rename from tests/providers/atlassian/jira/operators/__init__.py rename to providers/tests/databricks/sensors/__init__.py diff --git a/tests/providers/databricks/sensors/test_databricks_partition.py b/providers/tests/databricks/sensors/test_databricks_partition.py similarity index 100% rename from tests/providers/databricks/sensors/test_databricks_partition.py rename to providers/tests/databricks/sensors/test_databricks_partition.py diff --git a/tests/providers/databricks/sensors/test_databricks_sql.py b/providers/tests/databricks/sensors/test_databricks_sql.py 
similarity index 100% rename from tests/providers/databricks/sensors/test_databricks_sql.py rename to providers/tests/databricks/sensors/test_databricks_sql.py diff --git a/tests/providers/databricks/hooks/__init__.py b/providers/tests/databricks/triggers/__init__.py similarity index 100% rename from tests/providers/databricks/hooks/__init__.py rename to providers/tests/databricks/triggers/__init__.py diff --git a/tests/providers/databricks/triggers/test_databricks.py b/providers/tests/databricks/triggers/test_databricks.py similarity index 100% rename from tests/providers/databricks/triggers/test_databricks.py rename to providers/tests/databricks/triggers/test_databricks.py diff --git a/tests/providers/atlassian/jira/sensors/__init__.py b/providers/tests/databricks/utils/__init__.py similarity index 100% rename from tests/providers/atlassian/jira/sensors/__init__.py rename to providers/tests/databricks/utils/__init__.py diff --git a/tests/providers/databricks/utils/test_databricks.py b/providers/tests/databricks/utils/test_databricks.py similarity index 100% rename from tests/providers/databricks/utils/test_databricks.py rename to providers/tests/databricks/utils/test_databricks.py diff --git a/tests/providers/databricks/operators/__init__.py b/providers/tests/datadog/__init__.py similarity index 100% rename from tests/providers/databricks/operators/__init__.py rename to providers/tests/datadog/__init__.py diff --git a/tests/providers/databricks/triggers/__init__.py b/providers/tests/datadog/hooks/__init__.py similarity index 100% rename from tests/providers/databricks/triggers/__init__.py rename to providers/tests/datadog/hooks/__init__.py diff --git a/tests/providers/datadog/hooks/test_datadog.py b/providers/tests/datadog/hooks/test_datadog.py similarity index 100% rename from tests/providers/datadog/hooks/test_datadog.py rename to providers/tests/datadog/hooks/test_datadog.py diff --git a/tests/providers/datadog/__init__.py b/providers/tests/datadog/sensors/__init__.py similarity index 100% rename from tests/providers/datadog/__init__.py rename to providers/tests/datadog/sensors/__init__.py diff --git a/tests/providers/datadog/sensors/test_datadog.py b/providers/tests/datadog/sensors/test_datadog.py similarity index 100% rename from tests/providers/datadog/sensors/test_datadog.py rename to providers/tests/datadog/sensors/test_datadog.py diff --git a/tests/providers/celery/cli/__init__.py b/providers/tests/dbt/__init__.py similarity index 100% rename from tests/providers/celery/cli/__init__.py rename to providers/tests/dbt/__init__.py diff --git a/tests/providers/celery/log_handlers/__init__.py b/providers/tests/dbt/cloud/__init__.py similarity index 100% rename from tests/providers/celery/log_handlers/__init__.py rename to providers/tests/dbt/cloud/__init__.py diff --git a/tests/providers/cncf/__init__.py b/providers/tests/dbt/cloud/hooks/__init__.py similarity index 100% rename from tests/providers/cncf/__init__.py rename to providers/tests/dbt/cloud/hooks/__init__.py diff --git a/tests/providers/dbt/cloud/hooks/test_dbt.py b/providers/tests/dbt/cloud/hooks/test_dbt.py similarity index 99% rename from tests/providers/dbt/cloud/hooks/test_dbt.py rename to providers/tests/dbt/cloud/hooks/test_dbt.py index 0d84189bc8049..590f1b677f10c 100644 --- a/tests/providers/dbt/cloud/hooks/test_dbt.py +++ b/providers/tests/dbt/cloud/hooks/test_dbt.py @@ -676,8 +676,9 @@ def fake_sleep(seconds): # Shift frozen time every time we call a ``time.sleep`` during this test case. 
time_machine.shift(timedelta(seconds=seconds)) - with patch.object(DbtCloudHook, "get_job_run_status") as mock_job_run_status, patch( - "airflow.providers.dbt.cloud.hooks.dbt.time.sleep", side_effect=fake_sleep + with ( + patch.object(DbtCloudHook, "get_job_run_status") as mock_job_run_status, + patch("airflow.providers.dbt.cloud.hooks.dbt.time.sleep", side_effect=fake_sleep), ): mock_job_run_status.return_value = job_run_status diff --git a/tests/providers/cncf/kubernetes/cli/__init__.py b/providers/tests/dbt/cloud/operators/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/cli/__init__.py rename to providers/tests/dbt/cloud/operators/__init__.py diff --git a/tests/providers/dbt/cloud/operators/test_dbt.py b/providers/tests/dbt/cloud/operators/test_dbt.py similarity index 99% rename from tests/providers/dbt/cloud/operators/test_dbt.py rename to providers/tests/dbt/cloud/operators/test_dbt.py index 658fe84a49d67..eb50bd5a22a2e 100644 --- a/tests/providers/dbt/cloud/operators/test_dbt.py +++ b/providers/tests/dbt/cloud/operators/test_dbt.py @@ -235,8 +235,9 @@ def fake_sleep(seconds): overall_delta = timedelta(seconds=seconds) + timedelta(microseconds=42) time_machine.shift(overall_delta) - with patch.object(DbtCloudHook, "get_job_run") as mock_get_job_run, patch( - "airflow.providers.dbt.cloud.hooks.dbt.time.sleep", side_effect=fake_sleep + with ( + patch.object(DbtCloudHook, "get_job_run") as mock_get_job_run, + patch("airflow.providers.dbt.cloud.hooks.dbt.time.sleep", side_effect=fake_sleep), ): mock_get_job_run.return_value.json.return_value = { "data": {"status": job_run_status, "id": RUN_ID} diff --git a/tests/providers/cncf/kubernetes/data_files/__init__.py b/providers/tests/dbt/cloud/sensors/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/data_files/__init__.py rename to providers/tests/dbt/cloud/sensors/__init__.py diff --git a/tests/providers/dbt/cloud/sensors/test_dbt.py b/providers/tests/dbt/cloud/sensors/test_dbt.py similarity index 100% rename from tests/providers/dbt/cloud/sensors/test_dbt.py rename to providers/tests/dbt/cloud/sensors/test_dbt.py diff --git a/tests/providers/cncf/kubernetes/data_files/executor/__init__.py b/providers/tests/dbt/cloud/test_data/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/data_files/executor/__init__.py rename to providers/tests/dbt/cloud/test_data/__init__.py diff --git a/tests/providers/dbt/cloud/test_data/catalog.json b/providers/tests/dbt/cloud/test_data/catalog.json similarity index 100% rename from tests/providers/dbt/cloud/test_data/catalog.json rename to providers/tests/dbt/cloud/test_data/catalog.json diff --git a/tests/providers/dbt/cloud/test_data/job_run.json b/providers/tests/dbt/cloud/test_data/job_run.json similarity index 100% rename from tests/providers/dbt/cloud/test_data/job_run.json rename to providers/tests/dbt/cloud/test_data/job_run.json diff --git a/tests/providers/dbt/cloud/test_data/manifest.json b/providers/tests/dbt/cloud/test_data/manifest.json similarity index 100% rename from tests/providers/dbt/cloud/test_data/manifest.json rename to providers/tests/dbt/cloud/test_data/manifest.json diff --git a/tests/providers/dbt/cloud/test_data/run_results.json b/providers/tests/dbt/cloud/test_data/run_results.json similarity index 100% rename from tests/providers/dbt/cloud/test_data/run_results.json rename to providers/tests/dbt/cloud/test_data/run_results.json diff --git a/tests/providers/cncf/kubernetes/data_files/pods/__init__.py 
b/providers/tests/dbt/cloud/triggers/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/data_files/pods/__init__.py rename to providers/tests/dbt/cloud/triggers/__init__.py diff --git a/tests/providers/dbt/cloud/triggers/test_dbt.py b/providers/tests/dbt/cloud/triggers/test_dbt.py similarity index 100% rename from tests/providers/dbt/cloud/triggers/test_dbt.py rename to providers/tests/dbt/cloud/triggers/test_dbt.py diff --git a/tests/providers/cncf/kubernetes/data_files/spark/__init__.py b/providers/tests/dbt/cloud/utils/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/data_files/spark/__init__.py rename to providers/tests/dbt/cloud/utils/__init__.py diff --git a/tests/providers/dbt/cloud/utils/test_openlineage.py b/providers/tests/dbt/cloud/utils/test_openlineage.py similarity index 100% rename from tests/providers/dbt/cloud/utils/test_openlineage.py rename to providers/tests/dbt/cloud/utils/test_openlineage.py diff --git a/providers/tests/deprecations_ignore.yml b/providers/tests/deprecations_ignore.yml new file mode 100644 index 0000000000000..b5aff3c84ea5b --- /dev/null +++ b/providers/tests/deprecations_ignore.yml @@ -0,0 +1,128 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+---
+
+- providers/tests/amazon/aws/deferrable/hooks/test_base_aws.py::TestAwsBaseAsyncHook::test_get_client_async
+- providers/tests/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_cluster_status
+- providers/tests/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_get_cluster_status
+- providers/tests/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_get_cluster_status_exception
+- providers/tests/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_pause_cluster
+- providers/tests/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_resume_cluster
+- providers/tests/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_resume_cluster_exception
+- providers/tests/amazon/aws/triggers/test_redshift_cluster.py::TestRedshiftClusterTrigger::test_redshift_cluster_sensor_trigger_exception
+- providers/tests/amazon/aws/triggers/test_redshift_cluster.py::TestRedshiftClusterTrigger::test_redshift_cluster_sensor_trigger_resuming_status
+- providers/tests/amazon/aws/triggers/test_redshift_cluster.py::TestRedshiftClusterTrigger::test_redshift_cluster_sensor_trigger_success
+- providers/tests/google/common/auth_backend/test_google_openid.py::TestGoogleOpenID::test_success
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_api_resource_configs
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_api_resource_configs_duplication_warning
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_cancel_queries
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_cancel_query_cancel_completed
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_cancel_query_cancel_timeout
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_cancel_query_jobs_to_cancel
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_get_dataset_tables_list
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_invalid_schema_update_and_write_disposition
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_invalid_schema_update_options
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_invalid_source_format
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_extract
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_load_with_non_csv_as_src_fmt
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_schema_update_options
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_schema_update_options_incorrect
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_sql_dialect
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_sql_dialect_default
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_sql_dialect_legacy_with_query_params
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_sql_dialect_legacy_with_query_params_fails
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_with_arg
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_without_sql_fails
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_table_delete
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryHookRunWithConfiguration::test_run_with_configuration_location
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryWithKMS::test_create_external_table_with_kms
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryWithKMS::test_run_copy_with_kms
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryWithKMS::test_run_load_with_kms
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryWithKMS::test_run_query_with_kms
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryWithLabelsAndDescription::test_create_external_table_description
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryWithLabelsAndDescription::test_create_external_table_labels
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryWithLabelsAndDescription::test_run_load_description
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestBigQueryWithLabelsAndDescription::test_run_load_labels
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestClusteringInRunJob::test_run_load_default
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestClusteringInRunJob::test_run_load_with_arg
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestClusteringInRunJob::test_run_query_default
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestClusteringInRunJob::test_run_query_with_arg
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestDatasetsOperations::test_patch_dataset
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestTableOperations::test_patch_table
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestTimePartitioningInRunJob::test_run_load_default
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestTimePartitioningInRunJob::test_run_load_with_arg
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestTimePartitioningInRunJob::test_run_query_with_arg
+- providers/tests/google/cloud/hooks/test_bigquery.py::TestTimePartitioningInRunJob::test_run_with_auto_detect
+- providers/tests/google/cloud/hooks/test_gcs.py::TestGCSHook::test_list__error_match_glob_and_invalid_delimiter
+- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithDefaultProjectIdFromConnection::test_error_operation
+- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithDefaultProjectIdFromConnection::test_life_science_client_creation
+- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithDefaultProjectIdFromConnection::test_run_pipeline_immediately_complete
+- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithDefaultProjectIdFromConnection::test_waiting_operation
+- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_delegate_to_runtime_error
+- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_error_operation
+- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_life_science_client_creation
+- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_location_path
+- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_run_pipeline_immediately_complete
+- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_waiting_operation
+- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithoutProjectId::test_life_science_client_creation
+- providers/tests/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithoutProjectId::test_run_pipeline
+- providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithDefaultProjectIdHook::test_cancel_pipeline_job
+- providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithDefaultProjectIdHook::test_create_pipeline_job
+- providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithDefaultProjectIdHook::test_delete_pipeline_job
+- providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithDefaultProjectIdHook::test_get_pipeline_job
+- providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithDefaultProjectIdHook::test_list_pipeline_jobs
+- providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithoutDefaultProjectIdHook::test_cancel_pipeline_job
+- providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithoutDefaultProjectIdHook::test_create_pipeline_job
+- providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithoutDefaultProjectIdHook::test_delete_pipeline_job
+- providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithoutDefaultProjectIdHook::test_get_pipeline_job
+- providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithoutDefaultProjectIdHook::test_list_pipeline_jobs
+- providers/tests/google/cloud/operators/test_dataproc.py::TestDataProcHadoopOperator::test_execute
+- providers/tests/google/cloud/operators/test_dataproc.py::TestDataProcHiveOperator::test_builder
+- providers/tests/google/cloud/operators/test_dataproc.py::TestDataProcHiveOperator::test_execute
+- providers/tests/google/cloud/operators/test_dataproc.py::TestDataProcPigOperator::test_builder
+- providers/tests/google/cloud/operators/test_dataproc.py::TestDataProcPigOperator::test_execute
+- providers/tests/google/cloud/operators/test_dataproc.py::TestDataProcPySparkOperator::test_execute
+- providers/tests/google/cloud/operators/test_dataproc.py::TestDataProcSparkOperator::test_execute
+- providers/tests/google/cloud/operators/test_dataproc.py::TestDataProcSparkSqlOperator::test_builder
+- providers/tests/google/cloud/operators/test_dataproc.py::TestDataProcSparkSqlOperator::test_execute
+- providers/tests/google/cloud/operators/test_dataproc.py::TestDataProcSparkSqlOperator::test_execute_override_project_id
+- providers/tests/google/cloud/operators/test_dataproc.py::TestDataprocClusterScaleOperator::test_execute
+- providers/tests/google/cloud/operators/test_dataproc.py::test_create_cluster_operator_extra_links
+- providers/tests/google/cloud/operators/test_dataproc.py::test_scale_cluster_operator_extra_links
+- providers/tests/google/cloud/operators/test_dataproc.py::test_submit_spark_job_operator_extra_links
+- providers/tests/google/cloud/operators/test_gcs.py::TestGoogleCloudStorageListOperator::test_execute__delimiter
+- providers/tests/google/cloud/operators/test_kubernetes_engine.py::TestGoogleCloudPlatformContainerOperator::test_create_execute_error_body
+- providers/tests/google/cloud/operators/test_life_sciences.py::TestLifeSciencesRunPipelineOperator::test_executes
+- providers/tests/google/cloud/operators/test_life_sciences.py::TestLifeSciencesRunPipelineOperator::test_executes_without_project_id
+- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_copy_files_into_a_folder
+- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_last_modified_time
+- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_more_than_1_wildcard
+- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_no_prefix
+- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_no_suffix
+- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_prefix_and_suffix
+- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_empty_destination_object
+- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_with_destination_object
+- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_with_destination_object_retained_prefix
+- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_with_replace_flag_false
+- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_with_replace_flag_false_with_destination_object
+- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_without_destination_object
+- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_executes_with_a_delimiter
+- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_executes_with_delimiter_and_destination_object
+- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_executes_with_different_delimiter_and_destination_object
+- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_wc_with_last_modified_time_with_all_true_cond
+- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_wc_with_last_modified_time_with_one_true_cond
+- providers/tests/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_wc_with_no_last_modified_time
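Editor's note: the new providers/tests/deprecations_ignore.yml above is a plain YAML list of pytest node IDs that are temporarily exempt from the suite's strict deprecation-warning policy. A minimal sketch of how such a list can be consumed from a conftest.py follows; the file location and the hook used here are assumptions for illustration, not Airflow's actual wiring:

from __future__ import annotations

from pathlib import Path

import pytest
import yaml

# Hypothetical location: alongside this conftest.py (assumption).
_IGNORE_FILE = Path(__file__).parent / "deprecations_ignore.yml"
_IGNORED_NODE_IDS: set[str] = set(yaml.safe_load(_IGNORE_FILE.read_text()) or [])


def pytest_collection_modifyitems(config, items):
    # Escalate DeprecationWarning to an error for every collected test that
    # is NOT explicitly listed in deprecations_ignore.yml.
    for item in items:
        if item.nodeid not in _IGNORED_NODE_IDS:
            item.add_marker(pytest.mark.filterwarnings("error::DeprecationWarning"))

Keeping the exemptions in one reviewed file makes the remaining cleanup backlog visible: entries are deleted as tests stop relying on deprecated behaviour.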
diff --git a/tests/providers/datadog/hooks/__init__.py b/providers/tests/dingding/__init__.py
similarity index 100%
rename from tests/providers/datadog/hooks/__init__.py
rename to providers/tests/dingding/__init__.py
diff --git a/tests/providers/datadog/sensors/__init__.py b/providers/tests/dingding/hooks/__init__.py
similarity index 100%
rename from tests/providers/datadog/sensors/__init__.py
rename to providers/tests/dingding/hooks/__init__.py
diff --git a/tests/providers/dingding/hooks/test_dingding.py b/providers/tests/dingding/hooks/test_dingding.py
similarity index 100%
rename from tests/providers/dingding/hooks/test_dingding.py
rename to providers/tests/dingding/hooks/test_dingding.py
diff --git a/tests/providers/dingding/__init__.py b/providers/tests/dingding/operators/__init__.py
similarity index 100%
rename from tests/providers/dingding/__init__.py
rename to providers/tests/dingding/operators/__init__.py
diff --git a/tests/providers/dingding/operators/test_dingding.py b/providers/tests/dingding/operators/test_dingding.py
similarity index 100%
rename from tests/providers/dingding/operators/test_dingding.py
rename to providers/tests/dingding/operators/test_dingding.py
diff --git a/tests/providers/dingding/hooks/__init__.py b/providers/tests/discord/__init__.py
similarity index 100%
rename from tests/providers/dingding/hooks/__init__.py
rename to providers/tests/discord/__init__.py
diff --git a/tests/providers/dingding/operators/__init__.py b/providers/tests/discord/hooks/__init__.py
similarity index 100%
rename from tests/providers/dingding/operators/__init__.py
rename to providers/tests/discord/hooks/__init__.py
diff --git a/tests/providers/discord/hooks/test_discord_webhook.py b/providers/tests/discord/hooks/test_discord_webhook.py
similarity index 100%
rename from tests/providers/discord/hooks/test_discord_webhook.py
rename to providers/tests/discord/hooks/test_discord_webhook.py
diff --git a/tests/providers/cncf/kubernetes/decorators/__init__.py b/providers/tests/discord/notifications/__init__.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/decorators/__init__.py
rename to providers/tests/discord/notifications/__init__.py
diff --git a/tests/providers/discord/notifications/test_discord.py b/providers/tests/discord/notifications/test_discord.py
similarity index 100%
rename from tests/providers/discord/notifications/test_discord.py
rename to providers/tests/discord/notifications/test_discord.py
diff --git a/tests/providers/discord/__init__.py b/providers/tests/discord/operators/__init__.py
similarity index 100%
rename from tests/providers/discord/__init__.py
rename to providers/tests/discord/operators/__init__.py
diff --git a/tests/providers/discord/operators/test_discord_webhook.py b/providers/tests/discord/operators/test_discord_webhook.py
similarity index 100%
rename from tests/providers/discord/operators/test_discord_webhook.py
rename to providers/tests/discord/operators/test_discord_webhook.py
diff --git a/tests/providers/discord/hooks/__init__.py b/providers/tests/docker/__init__.py
similarity index 100%
rename from tests/providers/discord/hooks/__init__.py
rename to providers/tests/docker/__init__.py
diff --git a/tests/providers/docker/conftest.py b/providers/tests/docker/conftest.py
similarity index 100%
rename from tests/providers/docker/conftest.py
rename to providers/tests/docker/conftest.py
diff --git a/tests/providers/cncf/kubernetes/executors/__init__.py b/providers/tests/docker/decorators/__init__.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/executors/__init__.py
rename to providers/tests/docker/decorators/__init__.py
diff --git a/tests/providers/docker/decorators/test_docker.py b/providers/tests/docker/decorators/test_docker.py
similarity index 100%
rename from tests/providers/docker/decorators/test_docker.py
rename to providers/tests/docker/decorators/test_docker.py
diff --git a/tests/providers/discord/operators/__init__.py b/providers/tests/docker/hooks/__init__.py
similarity index 100%
rename from tests/providers/discord/operators/__init__.py
rename to providers/tests/docker/hooks/__init__.py
diff --git a/tests/providers/docker/hooks/test_docker.py b/providers/tests/docker/hooks/test_docker.py
similarity index 100%
rename from tests/providers/docker/hooks/test_docker.py
rename to providers/tests/docker/hooks/test_docker.py
diff --git a/tests/providers/docker/__init__.py b/providers/tests/docker/operators/__init__.py
similarity index 100%
rename from tests/providers/docker/__init__.py
rename to providers/tests/docker/operators/__init__.py
diff --git a/tests/providers/docker/operators/test_docker.py b/providers/tests/docker/operators/test_docker.py
similarity index 99%
rename from tests/providers/docker/operators/test_docker.py
rename to providers/tests/docker/operators/test_docker.py
index 8a461f7c084a7..9dbc84657f4f8 100644
--- a/tests/providers/docker/operators/test_docker.py
+++ b/providers/tests/docker/operators/test_docker.py
@@ -860,7 +860,8 @@ def test_partial_deprecated_skip_exit_code_ambiguous(
     with set_current_task_instance_session(session=session):
         warning_match = r"`skip_exit_code` is deprecated and will be removed"
         for ti in tis:
-            with pytest.warns(AirflowProviderDeprecationWarning, match=warning_match), pytest.raises(
-                ValueError, match="Conflicting `skip_on_exit_code` provided"
+            with (
+                pytest.warns(AirflowProviderDeprecationWarning, match=warning_match),
+                pytest.raises(ValueError, match="Conflicting `skip_on_exit_code` provided"),
             ):
                 ti.render_templates()
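Editor's note: the test_docker.py hunk above is purely a reformat of a double context manager. Since Python 3.10 the grammar allows the managers of a single `with` statement to be wrapped in parentheses, one per line with a trailing comma, instead of forcing an awkward line break inside the last call. A standalone sketch of the same syntax, using only the standard library:

import warnings

# Pre-3.10 style: both managers on one long line, or a break inside a call:
# with warnings.catch_warnings(), open(__file__, encoding="utf-8") as src:
#     ...

# Parenthesized form (Python 3.10+): each context manager on its own line.
with (
    warnings.catch_warnings(),
    open(__file__, encoding="utf-8") as src,
):
    warnings.simplefilter("ignore", DeprecationWarning)
    print(src.readline())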
diff --git a/tests/providers/docker/operators/test_docker_swarm.py b/providers/tests/docker/operators/test_docker_swarm.py
similarity index 100%
rename from tests/providers/docker/operators/test_docker_swarm.py
rename to providers/tests/docker/operators/test_docker_swarm.py
diff --git a/tests/providers/docker/test_exceptions.py b/providers/tests/docker/test_exceptions.py
similarity index 100%
rename from tests/providers/docker/test_exceptions.py
rename to providers/tests/docker/test_exceptions.py
diff --git a/tests/providers/docker/hooks/__init__.py b/providers/tests/edge/__init__.py
similarity index 100%
rename from tests/providers/docker/hooks/__init__.py
rename to providers/tests/edge/__init__.py
diff --git a/tests/providers/docker/operators/__init__.py b/providers/tests/edge/api_endpoints/__init__.py
similarity index 100%
rename from tests/providers/docker/operators/__init__.py
rename to providers/tests/edge/api_endpoints/__init__.py
diff --git a/tests/providers/edge/api_endpoints/test_health_endpoint.py b/providers/tests/edge/api_endpoints/test_health_endpoint.py
similarity index 100%
rename from tests/providers/edge/api_endpoints/test_health_endpoint.py
rename to providers/tests/edge/api_endpoints/test_health_endpoint.py
diff --git a/tests/providers/edge/api_endpoints/test_rpc_api_endpoint.py b/providers/tests/edge/api_endpoints/test_rpc_api_endpoint.py
similarity index 98%
rename from tests/providers/edge/api_endpoints/test_rpc_api_endpoint.py
rename to providers/tests/edge/api_endpoints/test_rpc_api_endpoint.py
index becf2f9397e31..9a9b4a7dd885f 100644
--- a/tests/providers/edge/api_endpoints/test_rpc_api_endpoint.py
+++ b/providers/tests/edge/api_endpoints/test_rpc_api_endpoint.py
@@ -40,8 +40,9 @@
 from airflow.utils.jwt_signer import JWTSigner
 from airflow.utils.state import State
 from airflow.www import app
-from tests.test_utils.decorators import dont_initialize_flask_app_submodules
-from tests.test_utils.mock_plugins import mock_plugin_manager
+
+from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules
+from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager
 
 # Note: Sounds a bit strange to disable internal API tests in isolation mode but...
 # As long as the test is modelled to run its own internal API endpoints, it is conflicting
diff --git a/tests/providers/edge/__init__.py b/providers/tests/edge/cli/__init__.py
similarity index 100%
rename from tests/providers/edge/__init__.py
rename to providers/tests/edge/cli/__init__.py
diff --git a/tests/providers/edge/cli/test_edge_command.py b/providers/tests/edge/cli/test_edge_command.py
similarity index 99%
rename from tests/providers/edge/cli/test_edge_command.py
rename to providers/tests/edge/cli/test_edge_command.py
index 398c221db02f9..af3e2c00e2998 100644
--- a/tests/providers/edge/cli/test_edge_command.py
+++ b/providers/tests/edge/cli/test_edge_command.py
@@ -33,7 +33,8 @@
 from airflow.providers.edge.models.edge_job import EdgeJob
 from airflow.providers.edge.models.edge_worker import EdgeWorker, EdgeWorkerState
 from airflow.utils.state import TaskInstanceState
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 pytest.importorskip("pydantic", minversion="2.0.0")
 
diff --git a/tests/providers/edge/api_endpoints/__init__.py b/providers/tests/edge/models/__init__.py
similarity index 100%
rename from tests/providers/edge/api_endpoints/__init__.py
rename to providers/tests/edge/models/__init__.py
diff --git a/tests/providers/edge/models/test_edge_job.py b/providers/tests/edge/models/test_edge_job.py
similarity index 100%
rename from tests/providers/edge/models/test_edge_job.py
rename to providers/tests/edge/models/test_edge_job.py
diff --git a/tests/providers/edge/models/test_edge_logs.py b/providers/tests/edge/models/test_edge_logs.py
similarity index 100%
rename from tests/providers/edge/models/test_edge_logs.py
rename to providers/tests/edge/models/test_edge_logs.py
diff --git a/tests/providers/edge/models/test_edge_worker.py b/providers/tests/edge/models/test_edge_worker.py
similarity index 100%
rename from tests/providers/edge/models/test_edge_worker.py
rename to providers/tests/edge/models/test_edge_worker.py
diff --git a/tests/providers/edge/cli/__init__.py b/providers/tests/edge/plugins/__init__.py
similarity index 100%
rename from tests/providers/edge/cli/__init__.py
rename to providers/tests/edge/plugins/__init__.py
diff --git a/tests/providers/edge/plugins/test_edge_executor_plugin.py b/providers/tests/edge/plugins/test_edge_executor_plugin.py
similarity index 97%
rename from tests/providers/edge/plugins/test_edge_executor_plugin.py
rename to providers/tests/edge/plugins/test_edge_executor_plugin.py
index e3422b17da3c8..d0c5a40770b31 100644
--- a/tests/providers/edge/plugins/test_edge_executor_plugin.py
+++ b/providers/tests/edge/plugins/test_edge_executor_plugin.py
@@ -22,7 +22,8 @@
 
 from airflow.plugins_manager import AirflowPlugin
 from airflow.providers.edge.plugins import edge_executor_plugin
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 
 def test_plugin_inactive():
diff --git a/tests/providers/edge/models/__init__.py b/providers/tests/elasticsearch/__init__.py
similarity index 100%
rename from tests/providers/edge/models/__init__.py
rename to providers/tests/elasticsearch/__init__.py
diff --git
a/tests/providers/edge/plugins/__init__.py b/providers/tests/elasticsearch/hooks/__init__.py similarity index 100% rename from tests/providers/edge/plugins/__init__.py rename to providers/tests/elasticsearch/hooks/__init__.py diff --git a/tests/providers/elasticsearch/hooks/test_elasticsearch.py b/providers/tests/elasticsearch/hooks/test_elasticsearch.py similarity index 100% rename from tests/providers/elasticsearch/hooks/test_elasticsearch.py rename to providers/tests/elasticsearch/hooks/test_elasticsearch.py diff --git a/tests/providers/cncf/kubernetes/hooks/__init__.py b/providers/tests/elasticsearch/log/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/hooks/__init__.py rename to providers/tests/elasticsearch/log/__init__.py diff --git a/tests/providers/elasticsearch/log/elasticmock/__init__.py b/providers/tests/elasticsearch/log/elasticmock/__init__.py similarity index 98% rename from tests/providers/elasticsearch/log/elasticmock/__init__.py rename to providers/tests/elasticsearch/log/elasticmock/__init__.py index 00d92471ba9f4..44e242d114574 100644 --- a/tests/providers/elasticsearch/log/elasticmock/__init__.py +++ b/providers/tests/elasticsearch/log/elasticmock/__init__.py @@ -43,7 +43,7 @@ from unittest.mock import patch from urllib.parse import unquote, urlparse -from tests.providers.elasticsearch.log.elasticmock.fake_elasticsearch import FakeElasticsearch +from providers.tests.elasticsearch.log.elasticmock.fake_elasticsearch import FakeElasticsearch ELASTIC_INSTANCES: dict[str, FakeElasticsearch] = {} diff --git a/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py b/providers/tests/elasticsearch/log/elasticmock/fake_elasticsearch.py similarity index 99% rename from tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py rename to providers/tests/elasticsearch/log/elasticmock/fake_elasticsearch.py index 26e47cbf08f7a..1d975ee718c3f 100644 --- a/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py +++ b/providers/tests/elasticsearch/log/elasticmock/fake_elasticsearch.py @@ -22,7 +22,7 @@ from elasticsearch import Elasticsearch from elasticsearch.exceptions import NotFoundError -from tests.providers.elasticsearch.log.elasticmock.utilities import ( +from providers.tests.elasticsearch.log.elasticmock.utilities import ( MissingIndexException, get_random_id, query_params, diff --git a/tests/providers/elasticsearch/log/elasticmock/utilities/__init__.py b/providers/tests/elasticsearch/log/elasticmock/utilities/__init__.py similarity index 100% rename from tests/providers/elasticsearch/log/elasticmock/utilities/__init__.py rename to providers/tests/elasticsearch/log/elasticmock/utilities/__init__.py diff --git a/tests/providers/elasticsearch/log/test_es_json_formatter.py b/providers/tests/elasticsearch/log/test_es_json_formatter.py similarity index 100% rename from tests/providers/elasticsearch/log/test_es_json_formatter.py rename to providers/tests/elasticsearch/log/test_es_json_formatter.py diff --git a/tests/providers/elasticsearch/log/test_es_response.py b/providers/tests/elasticsearch/log/test_es_response.py similarity index 100% rename from tests/providers/elasticsearch/log/test_es_response.py rename to providers/tests/elasticsearch/log/test_es_response.py diff --git a/tests/providers/elasticsearch/log/test_es_task_handler.py b/providers/tests/elasticsearch/log/test_es_task_handler.py similarity index 99% rename from tests/providers/elasticsearch/log/test_es_task_handler.py rename to 
providers/tests/elasticsearch/log/test_es_task_handler.py index 9321f49d77878..abde5daf8bf18 100644 --- a/tests/providers/elasticsearch/log/test_es_task_handler.py +++ b/providers/tests/elasticsearch/log/test_es_task_handler.py @@ -43,10 +43,11 @@ from airflow.utils import timezone from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.timezone import datetime -from tests.providers.elasticsearch.log.elasticmock import elasticmock -from tests.providers.elasticsearch.log.elasticmock.utilities import SearchFailedException -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_runs + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs +from providers.tests.elasticsearch.log.elasticmock import elasticmock +from providers.tests.elasticsearch.log.elasticmock.utilities import SearchFailedException pytestmark = pytest.mark.db_test diff --git a/tests/providers/elasticsearch/__init__.py b/providers/tests/email/__init__.py similarity index 100% rename from tests/providers/elasticsearch/__init__.py rename to providers/tests/email/__init__.py diff --git a/tests/providers/elasticsearch/hooks/__init__.py b/providers/tests/email/operators/__init__.py similarity index 100% rename from tests/providers/elasticsearch/hooks/__init__.py rename to providers/tests/email/operators/__init__.py diff --git a/tests/providers/email/__init__.py b/providers/tests/exasol/__init__.py similarity index 100% rename from tests/providers/email/__init__.py rename to providers/tests/exasol/__init__.py diff --git a/tests/providers/email/operators/__init__.py b/providers/tests/exasol/hooks/__init__.py similarity index 100% rename from tests/providers/email/operators/__init__.py rename to providers/tests/exasol/hooks/__init__.py diff --git a/tests/providers/exasol/hooks/test_exasol.py b/providers/tests/exasol/hooks/test_exasol.py similarity index 100% rename from tests/providers/exasol/hooks/test_exasol.py rename to providers/tests/exasol/hooks/test_exasol.py diff --git a/tests/providers/exasol/hooks/test_sql.py b/providers/tests/exasol/hooks/test_sql.py similarity index 100% rename from tests/providers/exasol/hooks/test_sql.py rename to providers/tests/exasol/hooks/test_sql.py diff --git a/tests/providers/exasol/__init__.py b/providers/tests/exasol/operators/__init__.py similarity index 100% rename from tests/providers/exasol/__init__.py rename to providers/tests/exasol/operators/__init__.py diff --git a/tests/providers/exasol/operators/test_exasol.py b/providers/tests/exasol/operators/test_exasol.py similarity index 100% rename from tests/providers/exasol/operators/test_exasol.py rename to providers/tests/exasol/operators/test_exasol.py diff --git a/tests/providers/exasol/operators/test_exasol_sql.py b/providers/tests/exasol/operators/test_exasol_sql.py similarity index 100% rename from tests/providers/exasol/operators/test_exasol_sql.py rename to providers/tests/exasol/operators/test_exasol_sql.py diff --git a/tests/providers/exasol/hooks/__init__.py b/providers/tests/fab/__init__.py similarity index 100% rename from tests/providers/exasol/hooks/__init__.py rename to providers/tests/fab/__init__.py diff --git a/tests/providers/exasol/operators/__init__.py b/providers/tests/fab/auth_manager/__init__.py similarity index 100% rename from tests/providers/exasol/operators/__init__.py rename to providers/tests/fab/auth_manager/__init__.py diff --git a/tests/providers/fab/__init__.py 
b/providers/tests/fab/auth_manager/api/__init__.py similarity index 100% rename from tests/providers/fab/__init__.py rename to providers/tests/fab/auth_manager/api/__init__.py diff --git a/tests/providers/cncf/kubernetes/log_handlers/__init__.py b/providers/tests/fab/auth_manager/api/auth/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/log_handlers/__init__.py rename to providers/tests/fab/auth_manager/api/auth/__init__.py diff --git a/tests/providers/cncf/kubernetes/operators/__init__.py b/providers/tests/fab/auth_manager/api/auth/backend/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/operators/__init__.py rename to providers/tests/fab/auth_manager/api/auth/backend/__init__.py diff --git a/tests/providers/fab/auth_manager/api/auth/backend/test_basic_auth.py b/providers/tests/fab/auth_manager/api/auth/backend/test_basic_auth.py similarity index 98% rename from tests/providers/fab/auth_manager/api/auth/backend/test_basic_auth.py rename to providers/tests/fab/auth_manager/api/auth/backend/test_basic_auth.py index 4a5104829a065..2c783e2046f88 100644 --- a/tests/providers/fab/auth_manager/api/auth/backend/test_basic_auth.py +++ b/providers/tests/fab/auth_manager/api/auth/backend/test_basic_auth.py @@ -24,7 +24,8 @@ from airflow.providers.fab.auth_manager.api.auth.backend.basic_auth import requires_authentication from airflow.www import app as application -from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.9.0+ only"), diff --git a/tests/providers/fab/auth_manager/api/auth/backend/test_kerberos_auth.py b/providers/tests/fab/auth_manager/api/auth/backend/test_kerberos_auth.py similarity index 92% rename from tests/providers/fab/auth_manager/api/auth/backend/test_kerberos_auth.py rename to providers/tests/fab/auth_manager/api/auth/backend/test_kerberos_auth.py index c763042e1c955..7d13a89e8db80 100644 --- a/tests/providers/fab/auth_manager/api/auth/backend/test_kerberos_auth.py +++ b/providers/tests/fab/auth_manager/api/auth/backend/test_kerberos_auth.py @@ -16,7 +16,7 @@ # under the License. 
from __future__ import annotations -from tests.test_utils.compat import ignore_provider_compatibility_error +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import init_app diff --git a/tests/providers/cncf/kubernetes/resource_convert/__init__.py b/providers/tests/fab/auth_manager/api_endpoints/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/resource_convert/__init__.py rename to providers/tests/fab/auth_manager/api_endpoints/__init__.py diff --git a/tests/providers/fab/auth_manager/api_endpoints/api_connexion_utils.py b/providers/tests/fab/auth_manager/api_endpoints/api_connexion_utils.py similarity index 97% rename from tests/providers/fab/auth_manager/api_endpoints/api_connexion_utils.py rename to providers/tests/fab/auth_manager/api_endpoints/api_connexion_utils.py index 61d923d5ff125..e4cbe93c9d62a 100644 --- a/tests/providers/fab/auth_manager/api_endpoints/api_connexion_utils.py +++ b/providers/tests/fab/auth_manager/api_endpoints/api_connexion_utils.py @@ -18,7 +18,7 @@ from contextlib import contextmanager -from tests.test_utils.compat import ignore_provider_compatibility_error +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.security_manager.override import EXISTING_ROLES diff --git a/tests/providers/fab/auth_manager/api_endpoints/remote_user_api_auth_backend.py b/providers/tests/fab/auth_manager/api_endpoints/remote_user_api_auth_backend.py similarity index 100% rename from tests/providers/fab/auth_manager/api_endpoints/remote_user_api_auth_backend.py rename to providers/tests/fab/auth_manager/api_endpoints/remote_user_api_auth_backend.py diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_asset_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_asset_endpoint.py similarity index 97% rename from tests/providers/fab/auth_manager/api_endpoints/test_asset_endpoint.py rename to providers/tests/fab/auth_manager/api_endpoints/test_asset_endpoint.py index 8e749d1dd18f2..5c22e5ba7ff92 100644 --- a/tests/providers/fab/auth_manager/api_endpoints/test_asset_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_asset_endpoint.py @@ -22,7 +22,13 @@ import time_machine from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS +from airflow.security import permissions +from airflow.utils import timezone + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import clear_db_assets, clear_db_runs +from dev.tests_common.test_utils.www import _check_last_log +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user try: from airflow.models.asset import AssetDagRunQueue, AssetModel @@ -31,11 +37,6 @@ raise else: pass -from airflow.security import permissions -from airflow.utils import timezone -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user -from tests.test_utils.db import clear_db_assets, clear_db_runs -from tests.test_utils.www import _check_last_log pytestmark = [ pytest.mark.db_test, diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_auth.py 
b/providers/tests/fab/auth_manager/api_endpoints/test_auth.py similarity index 95% rename from tests/providers/fab/auth_manager/api_endpoints/test_auth.py rename to providers/tests/fab/auth_manager/api_endpoints/test_auth.py index d3012e2f1b43e..630ce7050bed5 100644 --- a/tests/providers/fab/auth_manager/api_endpoints/test_auth.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_auth.py @@ -21,11 +21,11 @@ import pytest from flask_login import current_user -from tests.test_utils.api_connexion_utils import assert_401 -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_pools -from tests.test_utils.www import client_with_login +from dev.tests_common.test_utils.api_connexion_utils import assert_401 +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_pools +from dev.tests_common.test_utils.www import client_with_login pytestmark = [ pytest.mark.db_test, diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_backfill_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_backfill_endpoint.py similarity index 97% rename from tests/providers/fab/auth_manager/api_endpoints/test_backfill_endpoint.py rename to providers/tests/fab/auth_manager/api_endpoints/test_backfill_endpoint.py index 56f135d457e9c..9d9a79af51135 100644 --- a/tests/providers/fab/auth_manager/api_endpoints/test_backfill_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_backfill_endpoint.py @@ -25,7 +25,21 @@ import pytest from airflow.models import DagBag, DagModel -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS +from airflow.models.dag import DAG +from airflow.models.serialized_dag import SerializedDagModel +from airflow.operators.empty import EmptyOperator +from airflow.security import permissions +from airflow.utils import timezone +from airflow.utils.session import provide_session + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import ( + clear_db_backfills, + clear_db_dags, + clear_db_runs, + clear_db_serialized_dags, +) +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user try: from airflow.models.backfill import Backfill @@ -34,14 +48,6 @@ raise else: pass -from airflow.models.dag import DAG -from airflow.models.serialized_dag import SerializedDagModel -from airflow.operators.empty import EmptyOperator -from airflow.security import permissions -from airflow.utils import timezone -from airflow.utils.session import provide_session -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user -from tests.test_utils.db import clear_db_backfills, clear_db_dags, clear_db_runs, clear_db_serialized_dags pytestmark = [ pytest.mark.db_test, diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_cors.py b/providers/tests/fab/auth_manager/api_endpoints/test_cors.py similarity index 96% rename from tests/providers/fab/auth_manager/api_endpoints/test_cors.py rename to providers/tests/fab/auth_manager/api_endpoints/test_cors.py index b44eab8820ec6..8dbc4f964e61a 100644 --- a/tests/providers/fab/auth_manager/api_endpoints/test_cors.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_cors.py @@ -20,9 +20,9 @@ import pytest -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from 
tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_pools +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_pools pytestmark = [ pytest.mark.db_test, diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_dag_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_dag_endpoint.py similarity index 96% rename from tests/providers/fab/auth_manager/api_endpoints/test_dag_endpoint.py rename to providers/tests/fab/auth_manager/api_endpoints/test_dag_endpoint.py index b78ac58e442e0..e93425591f708 100644 --- a/tests/providers/fab/auth_manager/api_endpoints/test_dag_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_dag_endpoint.py @@ -28,10 +28,11 @@ from airflow.operators.empty import EmptyOperator from airflow.security import permissions from airflow.utils.session import provide_session -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags -from tests.test_utils.www import _check_last_log + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags +from dev.tests_common.test_utils.www import _check_last_log +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ pytest.mark.db_test, diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py similarity index 97% rename from tests/providers/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py rename to providers/tests/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py index a58ea08ff31cf..fa09df1be74e0 100644 --- a/tests/providers/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py @@ -27,7 +27,14 @@ from airflow.utils import timezone from airflow.utils.session import create_session from airflow.utils.state import DagRunState -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import ( + create_user, + delete_roles, + delete_user, +) try: from airflow.utils.types import DagRunTriggeredByType, DagRunType @@ -36,12 +43,6 @@ raise else: pass -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import ( - create_user, - delete_roles, - delete_user, -) -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags pytestmark = [ pytest.mark.db_test, diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_dag_source_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_dag_source_endpoint.py similarity index 95% rename from tests/providers/fab/auth_manager/api_endpoints/test_dag_source_endpoint.py rename to providers/tests/fab/auth_manager/api_endpoints/test_dag_source_endpoint.py index f0d9b0da298c6..27e41136555ea 100644 --- a/tests/providers/fab/auth_manager/api_endpoints/test_dag_source_endpoint.py +++ 
b/providers/tests/fab/auth_manager/api_endpoints/test_dag_source_endpoint.py @@ -24,9 +24,10 @@ from airflow.models import DagBag from airflow.security import permissions -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.db import clear_db_dag_code, clear_db_dags, clear_db_serialized_dags + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import clear_db_dag_code, clear_db_dags, clear_db_serialized_dags +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ pytest.mark.db_test, diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_dag_warning_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_dag_warning_endpoint.py similarity index 92% rename from tests/providers/fab/auth_manager/api_endpoints/test_dag_warning_endpoint.py rename to providers/tests/fab/auth_manager/api_endpoints/test_dag_warning_endpoint.py index adfde1cc5b3eb..01a2c68091ce1 100644 --- a/tests/providers/fab/auth_manager/api_endpoints/test_dag_warning_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_dag_warning_endpoint.py @@ -22,9 +22,10 @@ from airflow.models.dagwarning import DagWarning from airflow.security import permissions from airflow.utils.session import create_session -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.db import clear_db_dag_warnings, clear_db_dags + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import clear_db_dag_warnings, clear_db_dags +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ pytest.mark.db_test, diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_event_log_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_event_log_endpoint.py similarity index 96% rename from tests/providers/fab/auth_manager/api_endpoints/test_event_log_endpoint.py rename to providers/tests/fab/auth_manager/api_endpoints/test_event_log_endpoint.py index acf3ca62684a1..4794893241f6f 100644 --- a/tests/providers/fab/auth_manager/api_endpoints/test_event_log_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_event_log_endpoint.py @@ -21,9 +21,10 @@ from airflow.models import Log from airflow.security import permissions from airflow.utils import timezone -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.db import clear_db_logs + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import clear_db_logs +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ pytest.mark.db_test, diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_import_error_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_import_error_endpoint.py similarity index 96% rename from tests/providers/fab/auth_manager/api_endpoints/test_import_error_endpoint.py rename to providers/tests/fab/auth_manager/api_endpoints/test_import_error_endpoint.py index a2fa1d028a3f2..110e8e6302553 100644 --- 
a/tests/providers/fab/auth_manager/api_endpoints/test_import_error_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_import_error_endpoint.py @@ -21,10 +21,11 @@ from airflow.models.dag import DagModel from airflow.security import permissions from airflow.utils import timezone -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS, ParseImportError -from tests.test_utils.db import clear_db_dags, clear_db_import_errors -from tests.test_utils.permissions import _resource_name + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, ParseImportError +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_import_errors +from dev.tests_common.test_utils.permissions import _resource_name +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ pytest.mark.db_test, diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py similarity index 99% rename from tests/providers/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py rename to providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py index 413a49a9d86a1..b72c6fe6612f3 100644 --- a/tests/providers/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py @@ -19,22 +19,21 @@ import pytest from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import ( +from airflow.security import permissions + +from dev.tests_common.test_utils.api_connexion_utils import assert_401 +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import ( create_role, create_user, delete_role, delete_user, ) -from tests.test_utils.api_connexion_utils import assert_401 -from tests.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.models import Role from airflow.providers.fab.auth_manager.security_manager.override import EXISTING_ROLES - -from airflow.security import permissions - pytestmark = pytest.mark.db_test diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_role_and_permission_schema.py b/providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_schema.py similarity index 97% rename from tests/providers/fab/auth_manager/api_endpoints/test_role_and_permission_schema.py rename to providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_schema.py index 4a2f0068e5e4a..f5ec73a3e9893 100644 --- a/tests/providers/fab/auth_manager/api_endpoints/test_role_and_permission_schema.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_schema.py @@ -24,7 +24,8 @@ role_schema, ) from airflow.security import permissions -from tests.test_utils.api_connexion_utils import create_role, delete_role + +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_role, delete_role pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git 
a/tests/providers/fab/auth_manager/api_endpoints/test_task_instance_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_task_instance_endpoint.py similarity index 98% rename from tests/providers/fab/auth_manager/api_endpoints/test_task_instance_endpoint.py rename to providers/tests/fab/auth_manager/api_endpoints/test_task_instance_endpoint.py index 69b3c221eae93..7d379b6f8c007 100644 --- a/tests/providers/fab/auth_manager/api_endpoints/test_task_instance_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_task_instance_endpoint.py @@ -28,13 +28,14 @@ from airflow.utils.state import State from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import ( + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import clear_db_runs, clear_db_sla_miss, clear_rendered_ti_fields +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import ( create_user, delete_roles, delete_user, ) -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.db import clear_db_runs, clear_db_sla_miss, clear_rendered_ti_fields pytestmark = [ pytest.mark.db_test, diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_user_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_user_endpoint.py similarity index 99% rename from tests/providers/fab/auth_manager/api_endpoints/test_user_endpoint.py rename to providers/tests/fab/auth_manager/api_endpoints/test_user_endpoint.py index 7f2c885bab52c..c7a20cb59c320 100644 --- a/tests/providers/fab/auth_manager/api_endpoints/test_user_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_user_endpoint.py @@ -25,18 +25,19 @@ from airflow.security import permissions from airflow.utils import timezone from airflow.utils.session import create_session -from tests.test_utils.compat import ignore_provider_compatibility_error -with ignore_provider_compatibility_error("2.9.0+", __file__): - from airflow.providers.fab.auth_manager.models import User - -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import ( +from dev.tests_common.test_utils.api_connexion_utils import assert_401 +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error +from dev.tests_common.test_utils.config import conf_vars +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import ( create_user, delete_role, delete_user, ) -from tests.test_utils.api_connexion_utils import assert_401 -from tests.test_utils.config import conf_vars + +with ignore_provider_compatibility_error("2.9.0+", __file__): + from airflow.providers.fab.auth_manager.models import User + pytestmark = pytest.mark.db_test diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_user_schema.py b/providers/tests/fab/auth_manager/api_endpoints/test_user_schema.py similarity index 97% rename from tests/providers/fab/auth_manager/api_endpoints/test_user_schema.py rename to providers/tests/fab/auth_manager/api_endpoints/test_user_schema.py index f3399de6a9775..b37f27abe0397 100644 --- a/tests/providers/fab/auth_manager/api_endpoints/test_user_schema.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_user_schema.py @@ -18,14 +18,15 @@ import pytest -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_role, delete_role -from tests.test_utils.compat import 
ignore_provider_compatibility_error +from airflow.utils import timezone + +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_role, delete_role with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.api_connexion.schemas.user_schema import user_collection_item_schema, user_schema from airflow.providers.fab.auth_manager.models import User -from airflow.utils import timezone TEST_EMAIL = "test@example.org" diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_variable_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_variable_endpoint.py similarity index 93% rename from tests/providers/fab/auth_manager/api_endpoints/test_variable_endpoint.py rename to providers/tests/fab/auth_manager/api_endpoints/test_variable_endpoint.py index a8e71e1a82466..802eb4824087e 100644 --- a/tests/providers/fab/auth_manager/api_endpoints/test_variable_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_variable_endpoint.py @@ -20,9 +20,10 @@ from airflow.models import Variable from airflow.security import permissions -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.db import clear_db_variables + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import clear_db_variables +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ pytest.mark.db_test, diff --git a/tests/providers/fab/auth_manager/api_endpoints/test_xcom_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_xcom_endpoint.py similarity index 97% rename from tests/providers/fab/auth_manager/api_endpoints/test_xcom_endpoint.py rename to providers/tests/fab/auth_manager/api_endpoints/test_xcom_endpoint.py index 01336f9957c6d..06e8ee4847762 100644 --- a/tests/providers/fab/auth_manager/api_endpoints/test_xcom_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_xcom_endpoint.py @@ -29,9 +29,10 @@ from airflow.utils.dates import parse_execution_date from airflow.utils.session import create_session from airflow.utils.types import DagRunType -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_xcom + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_xcom +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ pytest.mark.db_test, diff --git a/tests/providers/cncf/kubernetes/sensors/__init__.py b/providers/tests/fab/auth_manager/cli_commands/__init__.py similarity index 100% rename from tests/providers/cncf/kubernetes/sensors/__init__.py rename to providers/tests/fab/auth_manager/cli_commands/__init__.py diff --git a/tests/providers/fab/auth_manager/cli_commands/test_db_command.py b/providers/tests/fab/auth_manager/cli_commands/test_db_command.py similarity index 100% rename from tests/providers/fab/auth_manager/cli_commands/test_db_command.py rename to providers/tests/fab/auth_manager/cli_commands/test_db_command.py diff --git 
a/tests/providers/fab/auth_manager/cli_commands/test_definition.py b/providers/tests/fab/auth_manager/cli_commands/test_definition.py
similarity index 94%
rename from tests/providers/fab/auth_manager/cli_commands/test_definition.py
rename to providers/tests/fab/auth_manager/cli_commands/test_definition.py
index 2db5d352ecc19..de906bef1ba30 100644
--- a/tests/providers/fab/auth_manager/cli_commands/test_definition.py
+++ b/providers/tests/fab/auth_manager/cli_commands/test_definition.py
@@ -16,7 +16,7 @@
 # under the License.
 from __future__ import annotations
 
-from tests.test_utils.compat import ignore_provider_compatibility_error
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error
 
 with ignore_provider_compatibility_error("2.9.0+", __file__):
     from airflow.providers.fab.auth_manager.cli_commands.definition import (
diff --git a/tests/providers/fab/auth_manager/cli_commands/test_role_command.py b/providers/tests/fab/auth_manager/cli_commands/test_role_command.py
similarity index 98%
rename from tests/providers/fab/auth_manager/cli_commands/test_role_command.py
rename to providers/tests/fab/auth_manager/cli_commands/test_role_command.py
index 5f12c01860d1f..fd176702c7219 100644
--- a/tests/providers/fab/auth_manager/cli_commands/test_role_command.py
+++ b/providers/tests/fab/auth_manager/cli_commands/test_role_command.py
@@ -25,8 +25,9 @@
 import pytest
 
 from airflow.cli import cli_parser
-from tests.test_utils.compat import ignore_provider_compatibility_error
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error
+from dev.tests_common.test_utils.config import conf_vars
 
 with ignore_provider_compatibility_error("2.9.0+", __file__):
     from airflow.providers.fab.auth_manager.cli_commands import role_command
diff --git a/tests/providers/fab/auth_manager/cli_commands/test_sync_perm_command.py b/providers/tests/fab/auth_manager/cli_commands/test_sync_perm_command.py
similarity index 96%
rename from tests/providers/fab/auth_manager/cli_commands/test_sync_perm_command.py
rename to providers/tests/fab/auth_manager/cli_commands/test_sync_perm_command.py
index 9e1817bd5617c..e0e4a70f4ac88 100644
--- a/tests/providers/fab/auth_manager/cli_commands/test_sync_perm_command.py
+++ b/providers/tests/fab/auth_manager/cli_commands/test_sync_perm_command.py
@@ -22,7 +22,8 @@
 import pytest
 
 from airflow.cli import cli_parser
-from tests.test_utils.compat import ignore_provider_compatibility_error
+
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error
 
 with ignore_provider_compatibility_error("2.9.0+", __file__):
     from airflow.providers.fab.auth_manager.cli_commands import sync_perm_command
diff --git a/tests/providers/fab/auth_manager/cli_commands/test_user_command.py b/providers/tests/fab/auth_manager/cli_commands/test_user_command.py
similarity index 99%
rename from tests/providers/fab/auth_manager/cli_commands/test_user_command.py
rename to providers/tests/fab/auth_manager/cli_commands/test_user_command.py
index b8ce2f48d6c03..5f2f66b0866a2 100644
--- a/tests/providers/fab/auth_manager/cli_commands/test_user_command.py
+++ b/providers/tests/fab/auth_manager/cli_commands/test_user_command.py
@@ -26,7 +26,8 @@
 import pytest
 
 from airflow.cli import cli_parser
-from tests.test_utils.compat import ignore_provider_compatibility_error
+
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error
 
 with ignore_provider_compatibility_error("2.9.0+", __file__):
     from airflow.providers.fab.auth_manager.cli_commands import user_command
diff --git a/tests/providers/fab/auth_manager/cli_commands/test_utils.py b/providers/tests/fab/auth_manager/cli_commands/test_utils.py
similarity index 93%
rename from tests/providers/fab/auth_manager/cli_commands/test_utils.py
rename to providers/tests/fab/auth_manager/cli_commands/test_utils.py
index fd8b1dfd50c89..f52defae6bb8f 100644
--- a/tests/providers/fab/auth_manager/cli_commands/test_utils.py
+++ b/providers/tests/fab/auth_manager/cli_commands/test_utils.py
@@ -18,7 +18,7 @@
 
 import pytest
 
-from tests.test_utils.compat import ignore_provider_compatibility_error
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error
 
 with ignore_provider_compatibility_error("2.9.0+", __file__):
     from airflow.providers.fab.auth_manager.cli_commands.utils import get_application_builder
diff --git a/tests/providers/fab/auth_manager/conftest.py b/providers/tests/fab/auth_manager/conftest.py
similarity index 91%
rename from tests/providers/fab/auth_manager/conftest.py
rename to providers/tests/fab/auth_manager/conftest.py
index a8fbe5fbdaaae..9102f5d0f65d4 100644
--- a/tests/providers/fab/auth_manager/conftest.py
+++ b/providers/tests/fab/auth_manager/conftest.py
@@ -19,8 +19,9 @@
 import pytest
 
 from airflow.www import app
-from tests.test_utils.config import conf_vars
-from tests.test_utils.decorators import dont_initialize_flask_app_submodules
+
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules
 
 
 @pytest.fixture(scope="session")
@@ -42,7 +43,7 @@ def factory():
             (
                 "api",
                 "auth_backends",
-            ): "tests.providers.fab.auth_manager.api_endpoints.remote_user_api_auth_backend,airflow.api.auth.backend.session",
+            ): "providers.tests.fab.auth_manager.api_endpoints.remote_user_api_auth_backend,airflow.api.auth.backend.session",
             (
                 "core",
                 "auth_manager",
diff --git a/tests/providers/cncf/kubernetes/triggers/__init__.py b/providers/tests/fab/auth_manager/decorators/__init__.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/triggers/__init__.py
rename to providers/tests/fab/auth_manager/decorators/__init__.py
diff --git a/tests/providers/fab/auth_manager/decorators/test_auth.py b/providers/tests/fab/auth_manager/decorators/test_auth.py
similarity index 98%
rename from tests/providers/fab/auth_manager/decorators/test_auth.py
rename to providers/tests/fab/auth_manager/decorators/test_auth.py
index 98f77a4f34271..202f0d6227073 100644
--- a/tests/providers/fab/auth_manager/decorators/test_auth.py
+++ b/providers/tests/fab/auth_manager/decorators/test_auth.py
@@ -21,7 +21,8 @@
 import pytest
 
 from airflow.security.permissions import ACTION_CAN_READ, RESOURCE_DAG
-from tests.test_utils.compat import ignore_provider_compatibility_error
+
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error
 
 permissions = [(ACTION_CAN_READ, RESOURCE_DAG)]
 
diff --git a/tests/providers/fab/auth_manager/__init__.py b/providers/tests/fab/auth_manager/models/__init__.py
similarity index 100%
rename from tests/providers/fab/auth_manager/__init__.py
rename to providers/tests/fab/auth_manager/models/__init__.py
diff --git a/tests/providers/fab/auth_manager/models/test_anonymous_user.py b/providers/tests/fab/auth_manager/models/test_anonymous_user.py
similarity index 93%
rename from tests/providers/fab/auth_manager/models/test_anonymous_user.py
rename to providers/tests/fab/auth_manager/models/test_anonymous_user.py
index 4e365e3c8b705..419d17aa9f3f9 100644
--- a/tests/providers/fab/auth_manager/models/test_anonymous_user.py
+++ b/providers/tests/fab/auth_manager/models/test_anonymous_user.py
@@ -17,7 +17,7 @@
 # under the License.
 from __future__ import annotations
 
-from tests.test_utils.compat import ignore_provider_compatibility_error
+from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error
 
 with ignore_provider_compatibility_error("2.9.0+", __file__):
     from airflow.providers.fab.auth_manager.models.anonymous_user import AnonymousUser
diff --git a/tests/providers/fab/auth_manager/models/test_db.py b/providers/tests/fab/auth_manager/models/test_db.py
similarity index 93%
rename from tests/providers/fab/auth_manager/models/test_db.py
rename to providers/tests/fab/auth_manager/models/test_db.py
index 528e1cbf099ff..3af94ceed7b18 100644
--- a/tests/providers/fab/auth_manager/models/test_db.py
+++ b/providers/tests/fab/auth_manager/models/test_db.py
@@ -16,7 +16,6 @@
 # under the License.
 from __future__ import annotations
 
-import os
 import re
 from unittest import mock
 
@@ -25,7 +24,7 @@
 from alembic.migration import MigrationContext
 from sqlalchemy import MetaData
 
-import airflow
+import airflow.providers
 from airflow.settings import engine
 from airflow.utils.db import (
     compare_server_default,
@@ -38,20 +37,16 @@
 
 class TestFABDBManager:
     def setup_method(self):
-        self.airflow_dir = os.path.dirname(airflow.__file__)
+        self.providers_dir: str = airflow.providers.__path__[0]
 
     def test_version_table_name_set(self, session):
         assert FABDBManager(session=session).version_table_name == "alembic_version_fab"
 
     def test_migration_dir_set(self, session):
-        assert (
-            FABDBManager(session=session).migration_dir == f"{self.airflow_dir}/providers/fab/migrations"
-        )
+        assert FABDBManager(session=session).migration_dir == f"{self.providers_dir}/fab/migrations"
 
     def test_alembic_file_set(self, session):
-        assert (
-            FABDBManager(session=session).alembic_file == f"{self.airflow_dir}/providers/fab/alembic.ini"
-        )
+        assert FABDBManager(session=session).alembic_file == f"{self.providers_dir}/fab/alembic.ini"
 
     def test_supports_table_dropping_set(self, session):
         assert FABDBManager(session=session).supports_table_dropping is True
diff --git a/tests/providers/cncf/kubernetes/utils/__init__.py b/providers/tests/fab/auth_manager/security_manager/__init__.py
similarity index 100%
rename from tests/providers/cncf/kubernetes/utils/__init__.py
rename to providers/tests/fab/auth_manager/security_manager/__init__.py
diff --git a/tests/providers/fab/auth_manager/security_manager/test_constants.py b/providers/tests/fab/auth_manager/security_manager/test_constants.py
similarity index 93%
rename from tests/providers/fab/auth_manager/security_manager/test_constants.py
rename to providers/tests/fab/auth_manager/security_manager/test_constants.py
index 5a718eee4b639..a6566fd987109 100644
--- a/tests/providers/fab/auth_manager/security_manager/test_constants.py
+++ b/providers/tests/fab/auth_manager/security_manager/test_constants.py
@@ -16,7 +16,7 @@
 # under the License.
from __future__ import annotations -from tests.test_utils.compat import ignore_provider_compatibility_error +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.security_manager.constants import EXISTING_ROLES diff --git a/tests/providers/fab/auth_manager/security_manager/test_override.py b/providers/tests/fab/auth_manager/security_manager/test_override.py similarity index 95% rename from tests/providers/fab/auth_manager/security_manager/test_override.py rename to providers/tests/fab/auth_manager/security_manager/test_override.py index 6d85c0319dc44..2733b688d1ae5 100644 --- a/tests/providers/fab/auth_manager/security_manager/test_override.py +++ b/providers/tests/fab/auth_manager/security_manager/test_override.py @@ -19,7 +19,7 @@ from unittest import mock from unittest.mock import Mock -from tests.test_utils.compat import ignore_provider_compatibility_error +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.security_manager.override import FabAirflowSecurityManagerOverride diff --git a/tests/providers/fab/auth_manager/test_fab_auth_manager.py b/providers/tests/fab/auth_manager/test_fab_auth_manager.py similarity index 99% rename from tests/providers/fab/auth_manager/test_fab_auth_manager.py rename to providers/tests/fab/auth_manager/test_fab_auth_manager.py index d727b6090822f..064c8e0dfd4c7 100644 --- a/tests/providers/fab/auth_manager/test_fab_auth_manager.py +++ b/providers/tests/fab/auth_manager/test_fab_auth_manager.py @@ -31,7 +31,7 @@ except ImportError: pass -from tests.test_utils.compat import ignore_provider_compatibility_error +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.fab_auth_manager import FabAuthManager diff --git a/tests/providers/fab/auth_manager/test_models.py b/providers/tests/fab/auth_manager/test_models.py similarity index 96% rename from tests/providers/fab/auth_manager/test_models.py rename to providers/tests/fab/auth_manager/test_models.py index 30677d7095753..6f03be373187b 100644 --- a/tests/providers/fab/auth_manager/test_models.py +++ b/providers/tests/fab/auth_manager/test_models.py @@ -20,7 +20,7 @@ from sqlalchemy import Column, MetaData, String, Table -from tests.test_utils.compat import ignore_provider_compatibility_error +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.models import ( diff --git a/tests/providers/fab/auth_manager/test_security.py b/providers/tests/fab/auth_manager/test_security.py similarity index 98% rename from tests/providers/fab/auth_manager/test_security.py rename to providers/tests/fab/auth_manager/test_security.py index bebb52c256fc8..8c9e221b71df0 100644 --- a/tests/providers/fab/auth_manager/test_security.py +++ b/providers/tests/fab/auth_manager/test_security.py @@ -36,7 +36,8 @@ from airflow.exceptions import AirflowException from airflow.models import DagModel from airflow.models.dag import DAG -from tests.test_utils.compat import ignore_provider_compatibility_error + +from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error with 
ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.fab_auth_manager import FabAuthManager @@ -49,17 +50,18 @@ from airflow.www.auth import get_access_denied_message from airflow.www.extensions.init_auth_manager import get_auth_manager from airflow.www.utils import CustomSQLAInterface -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import ( + +from dev.tests_common.test_utils.asserts import assert_queries_count +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs +from dev.tests_common.test_utils.mock_security_manager import MockSecurityManager +from dev.tests_common.test_utils.permissions import _resource_name +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import ( create_user, create_user_scope, delete_role, delete_user, set_user_single_role, ) -from tests.test_utils.asserts import assert_queries_count -from tests.test_utils.db import clear_db_dags, clear_db_runs -from tests.test_utils.mock_security_manager import MockSecurityManager -from tests.test_utils.permissions import _resource_name if TYPE_CHECKING: from airflow.security.permissions import RESOURCE_ASSET diff --git a/tests/providers/fab/auth_manager/api/__init__.py b/providers/tests/fab/auth_manager/views/__init__.py similarity index 100% rename from tests/providers/fab/auth_manager/api/__init__.py rename to providers/tests/fab/auth_manager/views/__init__.py diff --git a/tests/providers/fab/auth_manager/views/test_permissions.py b/providers/tests/fab/auth_manager/views/test_permissions.py similarity index 93% rename from tests/providers/fab/auth_manager/views/test_permissions.py rename to providers/tests/fab/auth_manager/views/test_permissions.py index f24d9b738343b..2ac26ffe45e87 100644 --- a/tests/providers/fab/auth_manager/views/test_permissions.py +++ b/providers/tests/fab/auth_manager/views/test_permissions.py @@ -21,9 +21,10 @@ from airflow.security import permissions from airflow.www import app as application -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS -from tests.test_utils.www import client_with_login + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS +from dev.tests_common.test_utils.www import client_with_login +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.9.0+ only"), diff --git a/tests/providers/fab/auth_manager/views/test_roles_list.py b/providers/tests/fab/auth_manager/views/test_roles_list.py similarity index 91% rename from tests/providers/fab/auth_manager/views/test_roles_list.py rename to providers/tests/fab/auth_manager/views/test_roles_list.py index 8de63ad5ba88a..3c1509d75350a 100644 --- a/tests/providers/fab/auth_manager/views/test_roles_list.py +++ b/providers/tests/fab/auth_manager/views/test_roles_list.py @@ -21,9 +21,10 @@ from airflow.security import permissions from airflow.www import app as application -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS -from tests.test_utils.www import client_with_login + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS +from dev.tests_common.test_utils.www import client_with_login +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils 
import create_user, delete_user pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.9.0+ only"), diff --git a/tests/providers/fab/auth_manager/views/test_user.py b/providers/tests/fab/auth_manager/views/test_user.py similarity index 91% rename from tests/providers/fab/auth_manager/views/test_user.py rename to providers/tests/fab/auth_manager/views/test_user.py index 62b03a99e7c2c..ee068f7f319fe 100644 --- a/tests/providers/fab/auth_manager/views/test_user.py +++ b/providers/tests/fab/auth_manager/views/test_user.py @@ -21,9 +21,10 @@ from airflow.security import permissions from airflow.www import app as application -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS -from tests.test_utils.www import client_with_login + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS +from dev.tests_common.test_utils.www import client_with_login +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.9.0+ only"), diff --git a/tests/providers/fab/auth_manager/views/test_user_edit.py b/providers/tests/fab/auth_manager/views/test_user_edit.py similarity index 91% rename from tests/providers/fab/auth_manager/views/test_user_edit.py rename to providers/tests/fab/auth_manager/views/test_user_edit.py index 8099f67948183..7cdc1a493b007 100644 --- a/tests/providers/fab/auth_manager/views/test_user_edit.py +++ b/providers/tests/fab/auth_manager/views/test_user_edit.py @@ -21,9 +21,10 @@ from airflow.security import permissions from airflow.www import app as application -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS -from tests.test_utils.www import client_with_login + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS +from dev.tests_common.test_utils.www import client_with_login +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.9.0+ only"), diff --git a/tests/providers/fab/auth_manager/views/test_user_stats.py b/providers/tests/fab/auth_manager/views/test_user_stats.py similarity index 91% rename from tests/providers/fab/auth_manager/views/test_user_stats.py rename to providers/tests/fab/auth_manager/views/test_user_stats.py index ae09cf92252c6..e50bc87535a48 100644 --- a/tests/providers/fab/auth_manager/views/test_user_stats.py +++ b/providers/tests/fab/auth_manager/views/test_user_stats.py @@ -21,9 +21,10 @@ from airflow.security import permissions from airflow.www import app as application -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS -from tests.test_utils.www import client_with_login + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS +from dev.tests_common.test_utils.www import client_with_login +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.9.0+ only"), diff --git a/tests/providers/cohere/__init__.py b/providers/tests/facebook/__init__.py similarity index 100% rename from 
tests/providers/cohere/__init__.py rename to providers/tests/facebook/__init__.py diff --git a/tests/providers/cohere/hooks/__init__.py b/providers/tests/facebook/ads/__init__.py similarity index 100% rename from tests/providers/cohere/hooks/__init__.py rename to providers/tests/facebook/ads/__init__.py diff --git a/tests/providers/cohere/operators/__init__.py b/providers/tests/facebook/ads/hooks/__init__.py similarity index 100% rename from tests/providers/cohere/operators/__init__.py rename to providers/tests/facebook/ads/hooks/__init__.py diff --git a/tests/providers/facebook/ads/hooks/test_ads.py b/providers/tests/facebook/ads/hooks/test_ads.py similarity index 100% rename from tests/providers/facebook/ads/hooks/test_ads.py rename to providers/tests/facebook/ads/hooks/test_ads.py diff --git a/tests/providers/fab/auth_manager/models/__init__.py b/providers/tests/ftp/__init__.py similarity index 100% rename from tests/providers/fab/auth_manager/models/__init__.py rename to providers/tests/ftp/__init__.py diff --git a/tests/providers/fab/auth_manager/views/__init__.py b/providers/tests/ftp/hooks/__init__.py similarity index 100% rename from tests/providers/fab/auth_manager/views/__init__.py rename to providers/tests/ftp/hooks/__init__.py diff --git a/tests/providers/ftp/hooks/test_ftp.py b/providers/tests/ftp/hooks/test_ftp.py similarity index 100% rename from tests/providers/ftp/hooks/test_ftp.py rename to providers/tests/ftp/hooks/test_ftp.py diff --git a/tests/providers/common/__init__.py b/providers/tests/ftp/operators/__init__.py similarity index 100% rename from tests/providers/common/__init__.py rename to providers/tests/ftp/operators/__init__.py diff --git a/tests/providers/ftp/operators/test_ftp.py b/providers/tests/ftp/operators/test_ftp.py similarity index 100% rename from tests/providers/ftp/operators/test_ftp.py rename to providers/tests/ftp/operators/test_ftp.py diff --git a/tests/providers/ftp/__init__.py b/providers/tests/ftp/sensors/__init__.py similarity index 100% rename from tests/providers/ftp/__init__.py rename to providers/tests/ftp/sensors/__init__.py diff --git a/tests/providers/ftp/sensors/test_ftp.py b/providers/tests/ftp/sensors/test_ftp.py similarity index 100% rename from tests/providers/ftp/sensors/test_ftp.py rename to providers/tests/ftp/sensors/test_ftp.py diff --git a/tests/providers/common/compat/__init__.py b/providers/tests/github/__init__.py similarity index 100% rename from tests/providers/common/compat/__init__.py rename to providers/tests/github/__init__.py diff --git a/tests/providers/common/compat/lineage/__init__.py b/providers/tests/github/hooks/__init__.py similarity index 100% rename from tests/providers/common/compat/lineage/__init__.py rename to providers/tests/github/hooks/__init__.py diff --git a/tests/providers/github/hooks/test_github.py b/providers/tests/github/hooks/test_github.py similarity index 100% rename from tests/providers/github/hooks/test_github.py rename to providers/tests/github/hooks/test_github.py diff --git a/tests/providers/common/compat/openlineage/__init__.py b/providers/tests/github/operators/__init__.py similarity index 100% rename from tests/providers/common/compat/openlineage/__init__.py rename to providers/tests/github/operators/__init__.py diff --git a/tests/providers/github/operators/test_github.py b/providers/tests/github/operators/test_github.py similarity index 100% rename from tests/providers/github/operators/test_github.py rename to providers/tests/github/operators/test_github.py diff --git 
a/tests/providers/common/compat/openlineage/utils/__init__.py b/providers/tests/github/sensors/__init__.py similarity index 100% rename from tests/providers/common/compat/openlineage/utils/__init__.py rename to providers/tests/github/sensors/__init__.py diff --git a/tests/providers/github/sensors/test_github.py b/providers/tests/github/sensors/test_github.py similarity index 100% rename from tests/providers/github/sensors/test_github.py rename to providers/tests/github/sensors/test_github.py diff --git a/tests/providers/common/compat/security/__init__.py b/providers/tests/google/__init__.py similarity index 100% rename from tests/providers/common/compat/security/__init__.py rename to providers/tests/google/__init__.py diff --git a/tests/providers/common/io/__init__.py b/providers/tests/google/ads/__init__.py similarity index 100% rename from tests/providers/common/io/__init__.py rename to providers/tests/google/ads/__init__.py diff --git a/tests/providers/common/io/assets/__init__.py b/providers/tests/google/ads/hooks/__init__.py similarity index 100% rename from tests/providers/common/io/assets/__init__.py rename to providers/tests/google/ads/hooks/__init__.py diff --git a/tests/providers/google/ads/hooks/test_ads.py b/providers/tests/google/ads/hooks/test_ads.py similarity index 100% rename from tests/providers/google/ads/hooks/test_ads.py rename to providers/tests/google/ads/hooks/test_ads.py diff --git a/tests/providers/common/io/operators/__init__.py b/providers/tests/google/ads/operators/__init__.py similarity index 100% rename from tests/providers/common/io/operators/__init__.py rename to providers/tests/google/ads/operators/__init__.py diff --git a/tests/providers/google/ads/operators/test_ads.py b/providers/tests/google/ads/operators/test_ads.py similarity index 100% rename from tests/providers/google/ads/operators/test_ads.py rename to providers/tests/google/ads/operators/test_ads.py diff --git a/tests/providers/common/io/xcom/__init__.py b/providers/tests/google/ads/transfers/__init__.py similarity index 100% rename from tests/providers/common/io/xcom/__init__.py rename to providers/tests/google/ads/transfers/__init__.py diff --git a/tests/providers/google/ads/transfers/test_ads_to_gcs.py b/providers/tests/google/ads/transfers/test_ads_to_gcs.py similarity index 97% rename from tests/providers/google/ads/transfers/test_ads_to_gcs.py rename to providers/tests/google/ads/transfers/test_ads_to_gcs.py index a97131203fa13..3d9c428494493 100644 --- a/tests/providers/google/ads/transfers/test_ads_to_gcs.py +++ b/providers/tests/google/ads/transfers/test_ads_to_gcs.py @@ -19,7 +19,8 @@ from unittest import mock from airflow.providers.google.ads.transfers.ads_to_gcs import GoogleAdsToGcsOperator -from tests.providers.google.ads.operators.test_ads import ( + +from providers.tests.google.ads.operators.test_ads import ( BUCKET, CLIENT_IDS, FIELDS_TO_EXTRACT, diff --git a/tests/providers/common/sql/__init__.py b/providers/tests/google/assets/__init__.py similarity index 100% rename from tests/providers/common/sql/__init__.py rename to providers/tests/google/assets/__init__.py diff --git a/tests/providers/google/datasets/test_bigquery.py b/providers/tests/google/assets/test_bigquery.py similarity index 100% rename from tests/providers/google/datasets/test_bigquery.py rename to providers/tests/google/assets/test_bigquery.py diff --git a/tests/providers/common/sql/hooks/__init__.py b/providers/tests/google/cloud/__init__.py similarity index 100% rename from 
tests/providers/common/sql/hooks/__init__.py rename to providers/tests/google/cloud/__init__.py diff --git a/tests/providers/common/sql/operators/__init__.py b/providers/tests/google/cloud/_internal_client/__init__.py similarity index 100% rename from tests/providers/common/sql/operators/__init__.py rename to providers/tests/google/cloud/_internal_client/__init__.py diff --git a/tests/providers/google/cloud/_internal_client/test_secret_manager_client.py b/providers/tests/google/cloud/_internal_client/test_secret_manager_client.py similarity index 100% rename from tests/providers/google/cloud/_internal_client/test_secret_manager_client.py rename to providers/tests/google/cloud/_internal_client/test_secret_manager_client.py diff --git a/tests/providers/common/sql/sensors/__init__.py b/providers/tests/google/cloud/hooks/__init__.py similarity index 100% rename from tests/providers/common/sql/sensors/__init__.py rename to providers/tests/google/cloud/hooks/__init__.py diff --git a/tests/providers/google/cloud/hooks/test_automl.py b/providers/tests/google/cloud/hooks/test_automl.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_automl.py rename to providers/tests/google/cloud/hooks/test_automl.py index 47f956c6d1032..26db131a9eef6 100644 --- a/tests/providers/google/cloud/hooks/test_automl.py +++ b/providers/tests/google/cloud/hooks/test_automl.py @@ -25,7 +25,8 @@ from airflow.providers.google.cloud.hooks.automl import CloudAutoMLHook from airflow.providers.google.common.consts import CLIENT_INFO -from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id + +from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id CREDENTIALS = "test-creds" TASK_ID = "test-automl-hook" diff --git a/tests/providers/google/cloud/hooks/test_bigquery.py b/providers/tests/google/cloud/hooks/test_bigquery.py similarity index 100% rename from tests/providers/google/cloud/hooks/test_bigquery.py rename to providers/tests/google/cloud/hooks/test_bigquery.py diff --git a/tests/providers/google/cloud/hooks/test_bigquery_dts.py b/providers/tests/google/cloud/hooks/test_bigquery_dts.py similarity index 98% rename from tests/providers/google/cloud/hooks/test_bigquery_dts.py rename to providers/tests/google/cloud/hooks/test_bigquery_dts.py index 1f973a9f21d85..163ca25273b18 100644 --- a/tests/providers/google/cloud/hooks/test_bigquery_dts.py +++ b/providers/tests/google/cloud/hooks/test_bigquery_dts.py @@ -30,7 +30,8 @@ AsyncBiqQueryDataTransferServiceHook, BiqQueryDataTransferServiceHook, ) -from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id + +from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id CREDENTIALS = "test-creds" PROJECT_ID = "id" diff --git a/tests/providers/google/cloud/hooks/test_bigquery_system.py b/providers/tests/google/cloud/hooks/test_bigquery_system.py similarity index 94% rename from tests/providers/google/cloud/hooks/test_bigquery_system.py rename to providers/tests/google/cloud/hooks/test_bigquery_system.py index 3076f4a123e37..676ab35035839 100644 --- a/tests/providers/google/cloud/hooks/test_bigquery_system.py +++ b/providers/tests/google/cloud/hooks/test_bigquery_system.py @@ -22,8 +22,9 @@ import pytest from airflow.providers.google.cloud.hooks import bigquery as hook -from tests.providers.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY -from 
tests.test_utils.gcp_system_helpers import GoogleSystemTest + +from dev.tests_common.test_utils.gcp_system_helpers import GoogleSystemTest +from providers.tests.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY @pytest.mark.system("google.cloud") diff --git a/tests/providers/google/cloud/hooks/test_bigtable.py b/providers/tests/google/cloud/hooks/test_bigtable.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_bigtable.py rename to providers/tests/google/cloud/hooks/test_bigtable.py index 4dda2fb009e76..f1929ba00d580 100644 --- a/tests/providers/google/cloud/hooks/test_bigtable.py +++ b/providers/tests/google/cloud/hooks/test_bigtable.py @@ -27,7 +27,8 @@ from airflow.providers.google.cloud.hooks.bigtable import BigtableHook from airflow.providers.google.common.consts import CLIENT_INFO -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( GCP_PROJECT_ID_HOOK_UNIT_TEST, mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, diff --git a/tests/providers/google/cloud/hooks/test_cloud_batch.py b/providers/tests/google/cloud/hooks/test_cloud_batch.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_cloud_batch.py rename to providers/tests/google/cloud/hooks/test_cloud_batch.py index de83b64fbe799..05cc26b6b479b 100644 --- a/tests/providers/google/cloud/hooks/test_cloud_batch.py +++ b/providers/tests/google/cloud/hooks/test_cloud_batch.py @@ -25,7 +25,8 @@ from airflow.exceptions import AirflowException from airflow.providers.google.cloud.hooks.cloud_batch import CloudBatchAsyncHook, CloudBatchHook -from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id + +from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id pytestmark = pytest.mark.db_test diff --git a/tests/providers/google/cloud/hooks/test_cloud_build.py b/providers/tests/google/cloud/hooks/test_cloud_build.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_cloud_build.py rename to providers/tests/google/cloud/hooks/test_cloud_build.py index 7672d460d0fb2..e65a5cdb8ba37 100644 --- a/tests/providers/google/cloud/hooks/test_cloud_build.py +++ b/providers/tests/google/cloud/hooks/test_cloud_build.py @@ -32,7 +32,8 @@ from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.providers.google.cloud.hooks.cloud_build import CloudBuildAsyncHook, CloudBuildHook from airflow.providers.google.common.consts import CLIENT_INFO -from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id + +from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id PROJECT_ID = "cloud-build-project" LOCATION = "test-location" diff --git a/tests/providers/google/cloud/hooks/test_cloud_composer.py b/providers/tests/google/cloud/hooks/test_cloud_composer.py similarity index 100% rename from tests/providers/google/cloud/hooks/test_cloud_composer.py rename to providers/tests/google/cloud/hooks/test_cloud_composer.py diff --git a/tests/providers/google/cloud/hooks/test_cloud_memorystore.py b/providers/tests/google/cloud/hooks/test_cloud_memorystore.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_cloud_memorystore.py rename to providers/tests/google/cloud/hooks/test_cloud_memorystore.py index 592acb82c2362..d96ebc5b4322a 100644 --- 
a/tests/providers/google/cloud/hooks/test_cloud_memorystore.py +++ b/providers/tests/google/cloud/hooks/test_cloud_memorystore.py @@ -32,7 +32,8 @@ CloudMemorystoreHook, CloudMemorystoreMemcachedHook, ) -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( GCP_PROJECT_ID_HOOK_UNIT_TEST, mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, diff --git a/tests/providers/google/cloud/hooks/test_cloud_run.py b/providers/tests/google/cloud/hooks/test_cloud_run.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_cloud_run.py rename to providers/tests/google/cloud/hooks/test_cloud_run.py index 48b76e1bca128..d23e96ff6e039 100644 --- a/tests/providers/google/cloud/hooks/test_cloud_run.py +++ b/providers/tests/google/cloud/hooks/test_cloud_run.py @@ -41,7 +41,8 @@ CloudRunServiceAsyncHook, CloudRunServiceHook, ) -from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id + +from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id @pytest.mark.db_test diff --git a/tests/providers/google/cloud/hooks/test_cloud_sql.py b/providers/tests/google/cloud/hooks/test_cloud_sql.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_cloud_sql.py rename to providers/tests/google/cloud/hooks/test_cloud_sql.py index a5d6e16664072..3365dc76aec37 100644 --- a/tests/providers/google/cloud/hooks/test_cloud_sql.py +++ b/providers/tests/google/cloud/hooks/test_cloud_sql.py @@ -40,7 +40,8 @@ CloudSQLHook, CloudSqlProxyRunner, ) -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, ) diff --git a/tests/providers/google/cloud/hooks/test_cloud_storage_transfer_service.py b/providers/tests/google/cloud/hooks/test_cloud_storage_transfer_service.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_cloud_storage_transfer_service.py rename to providers/tests/google/cloud/hooks/test_cloud_storage_transfer_service.py index 672a80d2fae1e..a68bcd3afef2c 100644 --- a/tests/providers/google/cloud/hooks/test_cloud_storage_transfer_service.py +++ b/providers/tests/google/cloud/hooks/test_cloud_storage_transfer_service.py @@ -46,7 +46,8 @@ GcpTransferOperationStatus, gen_job_name, ) -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( GCP_PROJECT_ID_HOOK_UNIT_TEST, mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, diff --git a/tests/providers/google/cloud/hooks/test_cloud_storage_transfer_service_async.py b/providers/tests/google/cloud/hooks/test_cloud_storage_transfer_service_async.py similarity index 98% rename from tests/providers/google/cloud/hooks/test_cloud_storage_transfer_service_async.py rename to providers/tests/google/cloud/hooks/test_cloud_storage_transfer_service_async.py index e05bacbbd2c30..968c1a95efbdf 100644 --- a/tests/providers/google/cloud/hooks/test_cloud_storage_transfer_service_async.py +++ b/providers/tests/google/cloud/hooks/test_cloud_storage_transfer_service_async.py @@ -26,7 +26,8 @@ from airflow.providers.google.cloud.hooks.cloud_storage_transfer_service import ( CloudDataTransferServiceAsyncHook, ) -from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id + 
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id TEST_PROJECT_ID = "project-id" TRANSFER_HOOK_PATH = "airflow.providers.google.cloud.hooks.cloud_storage_transfer_service" diff --git a/tests/providers/google/cloud/hooks/test_compute.py b/providers/tests/google/cloud/hooks/test_compute.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_compute.py rename to providers/tests/google/cloud/hooks/test_compute.py index f4d5da5414ac0..24a43ac3d87bb 100644 --- a/tests/providers/google/cloud/hooks/test_compute.py +++ b/providers/tests/google/cloud/hooks/test_compute.py @@ -25,7 +25,8 @@ from airflow.exceptions import AirflowException from airflow.providers.google.cloud.hooks.compute import ComputeEngineHook, GceOperationStatus -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( GCP_PROJECT_ID_HOOK_UNIT_TEST, mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, diff --git a/tests/providers/google/cloud/hooks/test_compute_ssh.py b/providers/tests/google/cloud/hooks/test_compute_ssh.py similarity index 100% rename from tests/providers/google/cloud/hooks/test_compute_ssh.py rename to providers/tests/google/cloud/hooks/test_compute_ssh.py diff --git a/tests/providers/google/cloud/hooks/test_datacatalog.py b/providers/tests/google/cloud/hooks/test_datacatalog.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_datacatalog.py rename to providers/tests/google/cloud/hooks/test_datacatalog.py index c97cef7c4ae1b..29ce515e8886c 100644 --- a/tests/providers/google/cloud/hooks/test_datacatalog.py +++ b/providers/tests/google/cloud/hooks/test_datacatalog.py @@ -28,7 +28,8 @@ from airflow.exceptions import AirflowException from airflow.providers.google.cloud.hooks.datacatalog import CloudDataCatalogHook -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, ) diff --git a/tests/providers/google/cloud/hooks/test_dataflow.py b/providers/tests/google/cloud/hooks/test_dataflow.py similarity index 100% rename from tests/providers/google/cloud/hooks/test_dataflow.py rename to providers/tests/google/cloud/hooks/test_dataflow.py diff --git a/tests/providers/google/cloud/hooks/test_dataform.py b/providers/tests/google/cloud/hooks/test_dataform.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_dataform.py rename to providers/tests/google/cloud/hooks/test_dataform.py index d5c94e40d5f97..bedf91dadf6e4 100644 --- a/tests/providers/google/cloud/hooks/test_dataform.py +++ b/providers/tests/google/cloud/hooks/test_dataform.py @@ -25,7 +25,8 @@ from airflow.exceptions import AirflowException from airflow.providers.google.cloud.hooks.dataform import DataformHook -from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id + +from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id pytestmark = pytest.mark.db_test diff --git a/tests/providers/google/cloud/hooks/test_datafusion.py b/providers/tests/google/cloud/hooks/test_datafusion.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_datafusion.py rename to providers/tests/google/cloud/hooks/test_datafusion.py index 7358c334fda11..271662f7bca5e 100644 --- 
a/tests/providers/google/cloud/hooks/test_datafusion.py +++ b/providers/tests/google/cloud/hooks/test_datafusion.py @@ -27,7 +27,8 @@ from airflow.exceptions import AirflowException from airflow.providers.google.cloud.hooks.datafusion import DataFusionAsyncHook, DataFusionHook from airflow.providers.google.cloud.utils.datafusion import DataFusionPipelineType -from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id + +from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id API_VERSION = "v1beta1" GCP_CONN_ID = "google_cloud_default" diff --git a/tests/providers/google/cloud/hooks/test_datapipeline.py b/providers/tests/google/cloud/hooks/test_datapipeline.py similarity index 100% rename from tests/providers/google/cloud/hooks/test_datapipeline.py rename to providers/tests/google/cloud/hooks/test_datapipeline.py diff --git a/tests/providers/google/cloud/hooks/test_dataplex.py b/providers/tests/google/cloud/hooks/test_dataplex.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_dataplex.py rename to providers/tests/google/cloud/hooks/test_dataplex.py index deca942a02fc2..9221a0e094653 100644 --- a/tests/providers/google/cloud/hooks/test_dataplex.py +++ b/providers/tests/google/cloud/hooks/test_dataplex.py @@ -22,7 +22,8 @@ from google.api_core.gapic_v1.method import DEFAULT from airflow.providers.google.cloud.operators.dataplex import DataplexHook -from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id + +from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id BASE_STRING = "airflow.providers.google.common.hooks.base_google.{}" DATAPLEX_STRING = "airflow.providers.google.cloud.hooks.dataplex.{}" diff --git a/tests/providers/google/cloud/hooks/test_dataprep.py b/providers/tests/google/cloud/hooks/test_dataprep.py similarity index 100% rename from tests/providers/google/cloud/hooks/test_dataprep.py rename to providers/tests/google/cloud/hooks/test_dataprep.py diff --git a/tests/providers/google/cloud/hooks/test_dataproc.py b/providers/tests/google/cloud/hooks/test_dataproc.py similarity index 100% rename from tests/providers/google/cloud/hooks/test_dataproc.py rename to providers/tests/google/cloud/hooks/test_dataproc.py diff --git a/tests/providers/google/cloud/hooks/test_dataproc_metastore.py b/providers/tests/google/cloud/hooks/test_dataproc_metastore.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_dataproc_metastore.py rename to providers/tests/google/cloud/hooks/test_dataproc_metastore.py index 28f506ce301b8..58a04d6bc8351 100644 --- a/tests/providers/google/cloud/hooks/test_dataproc_metastore.py +++ b/providers/tests/google/cloud/hooks/test_dataproc_metastore.py @@ -23,7 +23,8 @@ from google.api_core.gapic_v1.method import DEFAULT from airflow.providers.google.cloud.hooks.dataproc_metastore import DataprocMetastoreHook -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, ) diff --git a/tests/providers/google/cloud/hooks/test_datastore.py b/providers/tests/google/cloud/hooks/test_datastore.py similarity index 100% rename from tests/providers/google/cloud/hooks/test_datastore.py rename to providers/tests/google/cloud/hooks/test_datastore.py diff --git a/tests/providers/google/cloud/hooks/test_dlp.py 
b/providers/tests/google/cloud/hooks/test_dlp.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_dlp.py rename to providers/tests/google/cloud/hooks/test_dlp.py index 186d5de1732b3..f0c63fcd23625 100644 --- a/tests/providers/google/cloud/hooks/test_dlp.py +++ b/providers/tests/google/cloud/hooks/test_dlp.py @@ -33,7 +33,8 @@ from airflow.exceptions import AirflowException from airflow.providers.google.cloud.hooks.dlp import CloudDLPHook from airflow.providers.google.common.consts import CLIENT_INFO -from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id + +from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id API_RESPONSE: dict[Any, Any] = {} ORGANIZATION_ID = "test-org" diff --git a/tests/providers/google/cloud/hooks/test_functions.py b/providers/tests/google/cloud/hooks/test_functions.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_functions.py rename to providers/tests/google/cloud/hooks/test_functions.py index f7bb02b1e2c9e..4e3c48795117c 100644 --- a/tests/providers/google/cloud/hooks/test_functions.py +++ b/providers/tests/google/cloud/hooks/test_functions.py @@ -24,7 +24,8 @@ from airflow.exceptions import AirflowException from airflow.providers.google.cloud.hooks.functions import CloudFunctionsHook -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( GCP_PROJECT_ID_HOOK_UNIT_TEST, get_open_mock, mock_base_gcp_hook_default_project_id, diff --git a/tests/providers/google/cloud/hooks/test_gcs.py b/providers/tests/google/cloud/hooks/test_gcs.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_gcs.py rename to providers/tests/google/cloud/hooks/test_gcs.py index 5d2735834a958..464534bd11d93 100644 --- a/tests/providers/google/cloud/hooks/test_gcs.py +++ b/providers/tests/google/cloud/hooks/test_gcs.py @@ -41,7 +41,8 @@ from airflow.providers.google.common.consts import CLIENT_INFO from airflow.utils import timezone from airflow.version import version -from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id + +from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id BASE_STRING = "airflow.providers.google.common.hooks.base_google.{}" GCS_STRING = "airflow.providers.google.cloud.hooks.gcs.{}" diff --git a/tests/providers/google/cloud/hooks/test_gdm.py b/providers/tests/google/cloud/hooks/test_gdm.py similarity index 100% rename from tests/providers/google/cloud/hooks/test_gdm.py rename to providers/tests/google/cloud/hooks/test_gdm.py diff --git a/tests/providers/google/cloud/hooks/test_kms.py b/providers/tests/google/cloud/hooks/test_kms.py similarity index 100% rename from tests/providers/google/cloud/hooks/test_kms.py rename to providers/tests/google/cloud/hooks/test_kms.py diff --git a/tests/providers/google/cloud/hooks/test_kms_system.py b/providers/tests/google/cloud/hooks/test_kms_system.py similarity index 96% rename from tests/providers/google/cloud/hooks/test_kms_system.py rename to providers/tests/google/cloud/hooks/test_kms_system.py index afb4de1d1d04f..374fabb5246dd 100644 --- a/tests/providers/google/cloud/hooks/test_kms_system.py +++ b/providers/tests/google/cloud/hooks/test_kms_system.py @@ -23,8 +23,9 @@ import pytest from airflow.providers.google.cloud.hooks.kms import CloudKMSHook -from tests.providers.google.cloud.utils.gcp_authenticator 
import GCP_KMS_KEY -from tests.test_utils.gcp_system_helpers import GoogleSystemTest, provide_gcp_context + +from dev.tests_common.test_utils.gcp_system_helpers import GoogleSystemTest, provide_gcp_context +from providers.tests.google.cloud.utils.gcp_authenticator import GCP_KMS_KEY # To prevent resource name collisions, key ring and key resources CANNOT be deleted, so # to avoid cluttering the project, we only create the key once during project initialization. diff --git a/tests/providers/google/cloud/hooks/test_kubernetes_engine.py b/providers/tests/google/cloud/hooks/test_kubernetes_engine.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_kubernetes_engine.py rename to providers/tests/google/cloud/hooks/test_kubernetes_engine.py index 479cceed05815..e535323f4d4cf 100644 --- a/tests/providers/google/cloud/hooks/test_kubernetes_engine.py +++ b/providers/tests/google/cloud/hooks/test_kubernetes_engine.py @@ -34,7 +34,8 @@ GKEKubernetesHook, ) from airflow.providers.google.common.consts import CLIENT_INFO -from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id + +from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id TASK_ID = "test-gke-cluster-operator" CLUSTER_NAME = "test-cluster" diff --git a/tests/providers/google/cloud/hooks/test_life_sciences.py b/providers/tests/google/cloud/hooks/test_life_sciences.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_life_sciences.py rename to providers/tests/google/cloud/hooks/test_life_sciences.py index 582b856b22c8b..ca113ec648f55 100644 --- a/tests/providers/google/cloud/hooks/test_life_sciences.py +++ b/providers/tests/google/cloud/hooks/test_life_sciences.py @@ -28,7 +28,8 @@ from airflow.exceptions import AirflowException from airflow.providers.google.cloud.hooks.life_sciences import LifeSciencesHook -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( GCP_PROJECT_ID_HOOK_UNIT_TEST, mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, diff --git a/tests/providers/google/cloud/hooks/test_looker.py b/providers/tests/google/cloud/hooks/test_looker.py similarity index 100% rename from tests/providers/google/cloud/hooks/test_looker.py rename to providers/tests/google/cloud/hooks/test_looker.py diff --git a/tests/providers/google/cloud/hooks/test_mlengine.py b/providers/tests/google/cloud/hooks/test_mlengine.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_mlengine.py rename to providers/tests/google/cloud/hooks/test_mlengine.py index 7e8d7d68032fb..85ac30327d4c7 100644 --- a/tests/providers/google/cloud/hooks/test_mlengine.py +++ b/providers/tests/google/cloud/hooks/test_mlengine.py @@ -31,7 +31,8 @@ from airflow.exceptions import AirflowException from airflow.providers.google.cloud.hooks import mlengine as hook from airflow.providers.google.cloud.hooks.mlengine import MLEngineAsyncHook -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( GCP_PROJECT_ID_HOOK_UNIT_TEST, mock_base_gcp_hook_default_project_id, ) diff --git a/tests/providers/google/cloud/hooks/test_natural_language.py b/providers/tests/google/cloud/hooks/test_natural_language.py similarity index 98% rename from tests/providers/google/cloud/hooks/test_natural_language.py rename to providers/tests/google/cloud/hooks/test_natural_language.py index 
22ba3d87c10f5..a3228439b10eb 100644 --- a/tests/providers/google/cloud/hooks/test_natural_language.py +++ b/providers/tests/google/cloud/hooks/test_natural_language.py @@ -26,7 +26,8 @@ from airflow.providers.google.cloud.hooks.natural_language import CloudNaturalLanguageHook from airflow.providers.google.common.consts import CLIENT_INFO -from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id + +from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id API_RESPONSE: dict[Any, Any] = {} DOCUMENT = Document( diff --git a/tests/providers/google/cloud/hooks/test_os_login.py b/providers/tests/google/cloud/hooks/test_os_login.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_os_login.py rename to providers/tests/google/cloud/hooks/test_os_login.py index 568e96610c762..48861f6954692 100644 --- a/tests/providers/google/cloud/hooks/test_os_login.py +++ b/providers/tests/google/cloud/hooks/test_os_login.py @@ -24,7 +24,8 @@ from airflow.exceptions import AirflowException from airflow.providers.google.cloud.hooks.os_login import OSLoginHook -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, ) diff --git a/tests/providers/google/cloud/hooks/test_pubsub.py b/providers/tests/google/cloud/hooks/test_pubsub.py similarity index 100% rename from tests/providers/google/cloud/hooks/test_pubsub.py rename to providers/tests/google/cloud/hooks/test_pubsub.py diff --git a/tests/providers/google/cloud/hooks/test_secret_manager.py b/providers/tests/google/cloud/hooks/test_secret_manager.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_secret_manager.py rename to providers/tests/google/cloud/hooks/test_secret_manager.py index a800f1f785b8c..e758f76a9dcd7 100644 --- a/tests/providers/google/cloud/hooks/test_secret_manager.py +++ b/providers/tests/google/cloud/hooks/test_secret_manager.py @@ -29,7 +29,8 @@ SecretsManagerHook, ) from airflow.providers.google.common.consts import CLIENT_INFO -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( GCP_PROJECT_ID_HOOK_UNIT_TEST, mock_base_gcp_hook_default_project_id, ) diff --git a/tests/providers/google/cloud/hooks/test_secret_manager_system.py b/providers/tests/google/cloud/hooks/test_secret_manager_system.py similarity index 95% rename from tests/providers/google/cloud/hooks/test_secret_manager_system.py rename to providers/tests/google/cloud/hooks/test_secret_manager_system.py index 6aacc078b9a28..d13305f52b42e 100644 --- a/tests/providers/google/cloud/hooks/test_secret_manager_system.py +++ b/providers/tests/google/cloud/hooks/test_secret_manager_system.py @@ -21,8 +21,9 @@ import pytest from airflow.providers.google.cloud.hooks.secret_manager import SecretsManagerHook -from tests.providers.google.cloud.utils.gcp_authenticator import GCP_SECRET_MANAGER_KEY -from tests.test_utils.gcp_system_helpers import GoogleSystemTest, provide_gcp_context + +from dev.tests_common.test_utils.gcp_system_helpers import GoogleSystemTest, provide_gcp_context +from providers.tests.google.cloud.utils.gcp_authenticator import GCP_SECRET_MANAGER_KEY TEST_SECRET_ID = os.environ.get("GCP_SECRET_MANAGER_SECRET_ID", "test-secret") TEST_SECRET_VALUE = os.environ.get("GCP_SECRET_MANAGER_SECRET_VALUE", "test-secret-value") 
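[Editor's note] Every hunk in this run makes the same two mechanical substitutions: shared test helpers move from tests.test_utils to dev.tests_common.test_utils, and provider-local helpers move from tests.providers to providers.tests. As a hedged sketch of the layout these hunks converge on, the snippet below shows what a relocated GCP hook test looks like after the move. The import paths and the GoogleBaseHook patch target are taken from the hunks in this series; the test class itself is an illustrative stand-in, not a module from this patch.

# Illustrative sketch only -- not a file from this patch series.
from __future__ import annotations

from unittest import mock

from airflow.providers.google.cloud.hooks.spanner import SpannerHook

# Provider-local helper, previously imported from
# tests.providers.google.cloud.utils.base_gcp_mock:
from providers.tests.google.cloud.utils.base_gcp_mock import (
    mock_base_gcp_hook_default_project_id,
)


class TestSpannerHookImportLayout:
    def setup_method(self):
        # Assumed usage pattern: the shared mock replaces GoogleBaseHook.__init__
        # so the hook gets a default project_id without real credentials. The
        # hunks above only show the import change, not this call site.
        with mock.patch(
            "airflow.providers.google.common.hooks.base_google.GoogleBaseHook.__init__",
            new=mock_base_gcp_hook_default_project_id,
        ):
            self.hook = SpannerHook(gcp_conn_id="google_cloud_default")

    def test_hook_is_constructed(self):
        # Placeholder assertion for illustration only.
        assert self.hook is not None

The same shape applies to the dev.tests_common helpers (conf_vars, the ignore_provider_compatibility_error guard, gcp_system_helpers): only the import prefix changes, while call sites stay intact.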
diff --git a/tests/providers/google/cloud/hooks/test_spanner.py b/providers/tests/google/cloud/hooks/test_spanner.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_spanner.py rename to providers/tests/google/cloud/hooks/test_spanner.py index 1633d558166d4..3f6af9dcdcaf2 100644 --- a/tests/providers/google/cloud/hooks/test_spanner.py +++ b/providers/tests/google/cloud/hooks/test_spanner.py @@ -25,7 +25,8 @@ from airflow.providers.google.cloud.hooks.spanner import SpannerHook from airflow.providers.google.common.consts import CLIENT_INFO -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( GCP_PROJECT_ID_HOOK_UNIT_TEST, mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, diff --git a/tests/providers/google/cloud/hooks/test_speech_to_text.py b/providers/tests/google/cloud/hooks/test_speech_to_text.py similarity index 97% rename from tests/providers/google/cloud/hooks/test_speech_to_text.py rename to providers/tests/google/cloud/hooks/test_speech_to_text.py index 8cda071976245..97ef56f790b48 100644 --- a/tests/providers/google/cloud/hooks/test_speech_to_text.py +++ b/providers/tests/google/cloud/hooks/test_speech_to_text.py @@ -25,7 +25,8 @@ from airflow.providers.google.cloud.hooks.speech_to_text import CloudSpeechToTextHook from airflow.providers.google.common.consts import CLIENT_INFO -from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id + +from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id PROJECT_ID = "project-id" CONFIG = {"encoding": "LINEAR16"} diff --git a/tests/providers/google/cloud/hooks/test_stackdriver.py b/providers/tests/google/cloud/hooks/test_stackdriver.py similarity index 100% rename from tests/providers/google/cloud/hooks/test_stackdriver.py rename to providers/tests/google/cloud/hooks/test_stackdriver.py diff --git a/tests/providers/google/cloud/hooks/test_tasks.py b/providers/tests/google/cloud/hooks/test_tasks.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_tasks.py rename to providers/tests/google/cloud/hooks/test_tasks.py index 9885248db45e0..71228ba7e7178 100644 --- a/tests/providers/google/cloud/hooks/test_tasks.py +++ b/providers/tests/google/cloud/hooks/test_tasks.py @@ -26,7 +26,8 @@ from airflow.providers.google.cloud.hooks.tasks import CloudTasksHook from airflow.providers.google.common.consts import CLIENT_INFO -from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id + +from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id API_RESPONSE: dict[Any, Any] = {} PROJECT_ID = "test-project" diff --git a/tests/providers/google/cloud/hooks/test_text_to_speech.py b/providers/tests/google/cloud/hooks/test_text_to_speech.py similarity index 97% rename from tests/providers/google/cloud/hooks/test_text_to_speech.py rename to providers/tests/google/cloud/hooks/test_text_to_speech.py index 573c56e098f58..4f3eb90684499 100644 --- a/tests/providers/google/cloud/hooks/test_text_to_speech.py +++ b/providers/tests/google/cloud/hooks/test_text_to_speech.py @@ -29,7 +29,8 @@ from airflow.providers.google.cloud.hooks.text_to_speech import CloudTextToSpeechHook from airflow.providers.google.common.consts import CLIENT_INFO -from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id + +from 
providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id INPUT = {"text": "test text"} VOICE = {"language_code": "en-US", "ssml_gender": "FEMALE"} diff --git a/tests/providers/google/cloud/hooks/test_translate.py b/providers/tests/google/cloud/hooks/test_translate.py similarity index 98% rename from tests/providers/google/cloud/hooks/test_translate.py rename to providers/tests/google/cloud/hooks/test_translate.py index 068fe60fa373c..addc54d430e7a 100644 --- a/tests/providers/google/cloud/hooks/test_translate.py +++ b/providers/tests/google/cloud/hooks/test_translate.py @@ -23,7 +23,8 @@ from airflow.providers.google.cloud.hooks.translate import CloudTranslateHook from airflow.providers.google.common.consts import CLIENT_INFO -from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id + +from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id PROJECT_ID_TEST = "project-id" diff --git a/tests/providers/google/cloud/hooks/test_video_intelligence.py b/providers/tests/google/cloud/hooks/test_video_intelligence.py similarity index 98% rename from tests/providers/google/cloud/hooks/test_video_intelligence.py rename to providers/tests/google/cloud/hooks/test_video_intelligence.py index 75bb9b92f71c8..17e047e2da956 100644 --- a/tests/providers/google/cloud/hooks/test_video_intelligence.py +++ b/providers/tests/google/cloud/hooks/test_video_intelligence.py @@ -25,7 +25,8 @@ from airflow.providers.google.cloud.hooks.video_intelligence import CloudVideoIntelligenceHook from airflow.providers.google.common.consts import CLIENT_INFO -from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id + +from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id INPUT_URI = "gs://bucket-name/input-file" OUTPUT_URI = "gs://bucket-name/output-file" diff --git a/tests/providers/google/cloud/hooks/test_vision.py b/providers/tests/google/cloud/hooks/test_vision.py similarity index 99% rename from tests/providers/google/cloud/hooks/test_vision.py rename to providers/tests/google/cloud/hooks/test_vision.py index 814e412ee0fc5..6a414d59e1a19 100644 --- a/tests/providers/google/cloud/hooks/test_vision.py +++ b/providers/tests/google/cloud/hooks/test_vision.py @@ -37,7 +37,8 @@ from airflow.exceptions import AirflowException from airflow.providers.google.cloud.hooks.vision import ERR_DIFF_NAMES, ERR_UNABLE_TO_CREATE, CloudVisionHook from airflow.providers.google.common.consts import CLIENT_INFO -from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id + +from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id PROJECT_ID_TEST = "project-id" PROJECT_ID_TEST_2 = "project-id-2" diff --git a/tests/providers/google/cloud/hooks/test_workflows.py b/providers/tests/google/cloud/hooks/test_workflows.py similarity index 100% rename from tests/providers/google/cloud/hooks/test_workflows.py rename to providers/tests/google/cloud/hooks/test_workflows.py diff --git a/tests/providers/databricks/plugins/__init__.py b/providers/tests/google/cloud/hooks/vertex_ai/__init__.py similarity index 100% rename from tests/providers/databricks/plugins/__init__.py rename to providers/tests/google/cloud/hooks/vertex_ai/__init__.py diff --git a/tests/providers/google/cloud/hooks/vertex_ai/test_auto_ml.py b/providers/tests/google/cloud/hooks/vertex_ai/test_auto_ml.py 
similarity index 99% rename from tests/providers/google/cloud/hooks/vertex_ai/test_auto_ml.py rename to providers/tests/google/cloud/hooks/vertex_ai/test_auto_ml.py index 2e90cb7a1778b..6b64c966f54c9 100644 --- a/tests/providers/google/cloud/hooks/vertex_ai/test_auto_ml.py +++ b/providers/tests/google/cloud/hooks/vertex_ai/test_auto_ml.py @@ -27,7 +27,8 @@ from google.api_core.gapic_v1.method import DEFAULT from airflow.providers.google.cloud.hooks.vertex_ai.auto_ml import AutoMLHook -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, ) diff --git a/tests/providers/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py b/providers/tests/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py similarity index 99% rename from tests/providers/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py rename to providers/tests/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py index 1b2e0e1fbb9eb..15efe3e88bac4 100644 --- a/tests/providers/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py +++ b/providers/tests/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py @@ -33,7 +33,8 @@ BatchPredictionJobAsyncHook, BatchPredictionJobHook, ) -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, ) diff --git a/tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py b/providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py similarity index 99% rename from tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py rename to providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py index c6f31c48470e2..a2cedce2a92a5 100644 --- a/tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py +++ b/providers/tests/google/cloud/hooks/vertex_ai/test_custom_job.py @@ -36,7 +36,8 @@ PipelineState, types, ) -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, ) diff --git a/tests/providers/google/cloud/hooks/vertex_ai/test_dataset.py b/providers/tests/google/cloud/hooks/vertex_ai/test_dataset.py similarity index 99% rename from tests/providers/google/cloud/hooks/vertex_ai/test_dataset.py rename to providers/tests/google/cloud/hooks/vertex_ai/test_dataset.py index f20c9142a2c09..d54eb48c2d686 100644 --- a/tests/providers/google/cloud/hooks/vertex_ai/test_dataset.py +++ b/providers/tests/google/cloud/hooks/vertex_ai/test_dataset.py @@ -27,7 +27,8 @@ from google.api_core.gapic_v1.method import DEFAULT from airflow.providers.google.cloud.hooks.vertex_ai.dataset import DatasetHook -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, ) diff --git a/tests/providers/google/cloud/hooks/vertex_ai/test_endpoint_service.py b/providers/tests/google/cloud/hooks/vertex_ai/test_endpoint_service.py similarity index 99% rename from tests/providers/google/cloud/hooks/vertex_ai/test_endpoint_service.py rename to providers/tests/google/cloud/hooks/vertex_ai/test_endpoint_service.py index 273a97fa19af9..a284eae13a41b 100644 --- 
a/tests/providers/google/cloud/hooks/vertex_ai/test_endpoint_service.py +++ b/providers/tests/google/cloud/hooks/vertex_ai/test_endpoint_service.py @@ -27,7 +27,8 @@ from google.api_core.gapic_v1.method import DEFAULT from airflow.providers.google.cloud.hooks.vertex_ai.endpoint_service import EndpointServiceHook -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, ) diff --git a/tests/providers/google/cloud/hooks/vertex_ai/test_generative_model.py b/providers/tests/google/cloud/hooks/vertex_ai/test_generative_model.py similarity index 99% rename from tests/providers/google/cloud/hooks/vertex_ai/test_generative_model.py rename to providers/tests/google/cloud/hooks/vertex_ai/test_generative_model.py index 19723a51b1ddb..52a8c417c6ac2 100644 --- a/tests/providers/google/cloud/hooks/vertex_ai/test_generative_model.py +++ b/providers/tests/google/cloud/hooks/vertex_ai/test_generative_model.py @@ -33,7 +33,8 @@ from airflow.providers.google.cloud.hooks.vertex_ai.generative_model import ( GenerativeModelHook, ) -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( mock_base_gcp_hook_default_project_id, ) diff --git a/tests/providers/google/cloud/hooks/vertex_ai/test_hyperparameter_tuning_job.py b/providers/tests/google/cloud/hooks/vertex_ai/test_hyperparameter_tuning_job.py similarity index 99% rename from tests/providers/google/cloud/hooks/vertex_ai/test_hyperparameter_tuning_job.py rename to providers/tests/google/cloud/hooks/vertex_ai/test_hyperparameter_tuning_job.py index eca6fc17ebaf9..05fbf9c123447 100644 --- a/tests/providers/google/cloud/hooks/vertex_ai/test_hyperparameter_tuning_job.py +++ b/providers/tests/google/cloud/hooks/vertex_ai/test_hyperparameter_tuning_job.py @@ -37,7 +37,8 @@ HyperparameterTuningJobAsyncHook, HyperparameterTuningJobHook, ) -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, ) diff --git a/tests/providers/google/cloud/hooks/vertex_ai/test_model_service.py b/providers/tests/google/cloud/hooks/vertex_ai/test_model_service.py similarity index 99% rename from tests/providers/google/cloud/hooks/vertex_ai/test_model_service.py rename to providers/tests/google/cloud/hooks/vertex_ai/test_model_service.py index 1505e8276d077..c95d3fecd492f 100644 --- a/tests/providers/google/cloud/hooks/vertex_ai/test_model_service.py +++ b/providers/tests/google/cloud/hooks/vertex_ai/test_model_service.py @@ -27,7 +27,8 @@ from google.api_core.gapic_v1.method import DEFAULT from airflow.providers.google.cloud.hooks.vertex_ai.model_service import ModelServiceHook -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, ) diff --git a/tests/providers/google/cloud/hooks/vertex_ai/test_pipeline_job.py b/providers/tests/google/cloud/hooks/vertex_ai/test_pipeline_job.py similarity index 99% rename from tests/providers/google/cloud/hooks/vertex_ai/test_pipeline_job.py rename to providers/tests/google/cloud/hooks/vertex_ai/test_pipeline_job.py index 02068ec57051f..21937251c3c53 100644 --- a/tests/providers/google/cloud/hooks/vertex_ai/test_pipeline_job.py 
+++ b/providers/tests/google/cloud/hooks/vertex_ai/test_pipeline_job.py @@ -34,7 +34,8 @@ PipelineState, types, ) -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, ) diff --git a/tests/providers/google/cloud/hooks/vertex_ai/test_prediction_service.py b/providers/tests/google/cloud/hooks/vertex_ai/test_prediction_service.py similarity index 98% rename from tests/providers/google/cloud/hooks/vertex_ai/test_prediction_service.py rename to providers/tests/google/cloud/hooks/vertex_ai/test_prediction_service.py index 987578b7c11ea..7e56ca92813a5 100644 --- a/tests/providers/google/cloud/hooks/vertex_ai/test_prediction_service.py +++ b/providers/tests/google/cloud/hooks/vertex_ai/test_prediction_service.py @@ -28,7 +28,8 @@ from airflow.providers.google.cloud.hooks.vertex_ai.prediction_service import ( PredictionServiceHook, ) -from tests.providers.google.cloud.utils.base_gcp_mock import ( + +from providers.tests.google.cloud.utils.base_gcp_mock import ( mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, ) diff --git a/tests/providers/databricks/sensors/__init__.py b/providers/tests/google/cloud/links/__init__.py similarity index 100% rename from tests/providers/databricks/sensors/__init__.py rename to providers/tests/google/cloud/links/__init__.py diff --git a/tests/providers/google/cloud/links/test_translate.py b/providers/tests/google/cloud/links/test_translate.py similarity index 100% rename from tests/providers/google/cloud/links/test_translate.py rename to providers/tests/google/cloud/links/test_translate.py diff --git a/tests/providers/databricks/utils/__init__.py b/providers/tests/google/cloud/log/__init__.py similarity index 100% rename from tests/providers/databricks/utils/__init__.py rename to providers/tests/google/cloud/log/__init__.py diff --git a/tests/providers/google/cloud/log/test_gcs_task_handler.py b/providers/tests/google/cloud/log/test_gcs_task_handler.py similarity index 98% rename from tests/providers/google/cloud/log/test_gcs_task_handler.py rename to providers/tests/google/cloud/log/test_gcs_task_handler.py index 7653c4d837be4..9f27c618de29d 100644 --- a/tests/providers/google/cloud/log/test_gcs_task_handler.py +++ b/providers/tests/google/cloud/log/test_gcs_task_handler.py @@ -27,8 +27,9 @@ from airflow.providers.google.cloud.log.gcs_task_handler import GCSTaskHandler from airflow.utils.state import TaskInstanceState from airflow.utils.timezone import datetime -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_runs + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs @pytest.mark.db_test diff --git a/tests/providers/google/cloud/log/test_gcs_task_handler_system.py b/providers/tests/google/cloud/log/test_gcs_task_handler_system.py similarity index 84% rename from tests/providers/google/cloud/log/test_gcs_task_handler_system.py rename to providers/tests/google/cloud/log/test_gcs_task_handler_system.py index 406bb0387df57..eb1ce08d14f5c 100644 --- a/tests/providers/google/cloud/log/test_gcs_task_handler_system.py +++ b/providers/tests/google/cloud/log/test_gcs_task_handler_system.py @@ -29,14 +29,15 @@ from airflow.models import DagBag, TaskInstance from airflow.utils.log.log_reader import TaskLogReader from airflow.utils.session import provide_session 
-from tests.providers.google.cloud.utils.gcp_authenticator import GCP_GCS_KEY -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_connections, clear_db_runs -from tests.test_utils.gcp_system_helpers import ( + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_connections, clear_db_runs +from dev.tests_common.test_utils.gcp_system_helpers import ( GoogleSystemTest, provide_gcp_context, resolve_full_gcp_key_path, ) +from providers.tests.google.cloud.utils.gcp_authenticator import GCP_GCS_KEY @pytest.mark.system("google") @@ -84,12 +85,15 @@ def test_should_read_logs(self, session): self.assert_remote_logs("INFO - Task exited with return code 0", ti) def assert_remote_logs(self, expected_message, ti): - with provide_gcp_context(GCP_GCS_KEY), conf_vars( - { - ("logging", "remote_logging"): "True", - ("logging", "remote_base_log_folder"): f"gs://{self.bucket_name}/path/to/logs", - ("logging", "remote_log_conn_id"): "google_cloud_default", - } + with ( + provide_gcp_context(GCP_GCS_KEY), + conf_vars( + { + ("logging", "remote_logging"): "True", + ("logging", "remote_base_log_folder"): f"gs://{self.bucket_name}/path/to/logs", + ("logging", "remote_log_conn_id"): "google_cloud_default", + } + ), ): from airflow.config_templates import airflow_local_settings diff --git a/tests/providers/google/cloud/log/test_stackdriver_task_handler.py b/providers/tests/google/cloud/log/test_stackdriver_task_handler.py similarity index 99% rename from tests/providers/google/cloud/log/test_stackdriver_task_handler.py rename to providers/tests/google/cloud/log/test_stackdriver_task_handler.py index 1ac8b91df5a65..783f1a34b762c 100644 --- a/tests/providers/google/cloud/log/test_stackdriver_task_handler.py +++ b/providers/tests/google/cloud/log/test_stackdriver_task_handler.py @@ -29,9 +29,10 @@ from airflow.providers.google.cloud.log.stackdriver_task_handler import StackdriverTaskHandler from airflow.utils import timezone from airflow.utils.state import TaskInstanceState -from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_runs + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs def _create_list_log_entries_response_mock(messages, token): diff --git a/tests/providers/google/cloud/log/test_stackdriver_task_handler_system.py b/providers/tests/google/cloud/log/test_stackdriver_task_handler_system.py similarity index 88% rename from tests/providers/google/cloud/log/test_stackdriver_task_handler_system.py rename to providers/tests/google/cloud/log/test_stackdriver_task_handler_system.py index b5daac0d81495..a53dd43d08feb 100644 --- a/tests/providers/google/cloud/log/test_stackdriver_task_handler_system.py +++ b/providers/tests/google/cloud/log/test_stackdriver_task_handler_system.py @@ -29,14 +29,15 @@ from airflow.models import TaskInstance from airflow.utils.log.log_reader import TaskLogReader from airflow.utils.session import provide_session -from tests.providers.google.cloud.utils.gcp_authenticator import GCP_STACKDRIVER -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_runs -from tests.test_utils.gcp_system_helpers import ( + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import 
clear_db_runs +from dev.tests_common.test_utils.gcp_system_helpers import ( GoogleSystemTest, provide_gcp_context, resolve_full_gcp_key_path, ) +from providers.tests.google.cloud.utils.gcp_authenticator import GCP_STACKDRIVER @pytest.mark.system("google") @@ -86,11 +87,14 @@ def test_should_support_adc(self, session): self.assert_remote_logs("terminated with exit code 0", ti) def assert_remote_logs(self, expected_message, ti): - with provide_gcp_context(GCP_STACKDRIVER), conf_vars( - { - ("logging", "remote_logging"): "True", - ("logging", "remote_base_log_folder"): f"stackdriver://{self.log_name}", - } + with ( + provide_gcp_context(GCP_STACKDRIVER), + conf_vars( + { + ("logging", "remote_logging"): "True", + ("logging", "remote_base_log_folder"): f"stackdriver://{self.log_name}", + } + ), ): from airflow.config_templates import airflow_local_settings diff --git a/tests/providers/dbt/__init__.py b/providers/tests/google/cloud/openlineage/__init__.py similarity index 100% rename from tests/providers/dbt/__init__.py rename to providers/tests/google/cloud/openlineage/__init__.py diff --git a/tests/providers/google/cloud/openlineage/test_mixins.py b/providers/tests/google/cloud/openlineage/test_mixins.py similarity index 95% rename from tests/providers/google/cloud/openlineage/test_mixins.py rename to providers/tests/google/cloud/openlineage/test_mixins.py index f7feade65d361..5229db3a6071d 100644 --- a/tests/providers/google/cloud/openlineage/test_mixins.py +++ b/providers/tests/google/cloud/openlineage/test_mixins.py @@ -17,6 +17,7 @@ from __future__ import annotations import json +import os from unittest.mock import MagicMock import pytest @@ -35,17 +36,17 @@ ) -def read_file_json(file): - with open(file=file) as f: - return json.loads(f.read()) +def read_common_json_file(rel: str): + with open(os.path.dirname(__file__) + "/../utils/" + rel) as f: + return json.load(f) class TableMock(MagicMock): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.inputs = [ - read_file_json("tests/providers/google/cloud/utils/table_details.json"), - read_file_json("tests/providers/google/cloud/utils/out_table_details.json"), + read_common_json_file("table_details.json"), + read_common_json_file("out_table_details.json"), ] @property @@ -55,8 +56,8 @@ def _properties(self): class TestBigQueryOpenLineageMixin: def setup_method(self): - self.job_details = read_file_json("tests/providers/google/cloud/utils/job_details.json") - self.script_job_details = read_file_json("tests/providers/google/cloud/utils/script_job_details.json") + self.job_details = read_common_json_file("job_details.json") + self.script_job_details = read_common_json_file("script_job_details.json") hook = MagicMock() self.client = MagicMock() diff --git a/tests/providers/google/cloud/openlineage/test_utils.py b/providers/tests/google/cloud/openlineage/test_utils.py similarity index 97% rename from tests/providers/google/cloud/openlineage/test_utils.py rename to providers/tests/google/cloud/openlineage/test_utils.py index e47f14332f455..4f2db0038b7b7 100644 --- a/tests/providers/google/cloud/openlineage/test_utils.py +++ b/providers/tests/google/cloud/openlineage/test_utils.py @@ -65,8 +65,8 @@ class TableMock(MagicMock): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.inputs = [ - read_file_json("tests/providers/google/cloud/utils/table_details.json"), - read_file_json("tests/providers/google/cloud/utils/out_table_details.json"), + 
read_file_json("providers/tests/google/cloud/utils/table_details.json"), + read_file_json("providers/tests/google/cloud/utils/out_table_details.json"), ] @property diff --git a/tests/providers/dbt/cloud/__init__.py b/providers/tests/google/cloud/operators/__init__.py similarity index 100% rename from tests/providers/dbt/cloud/__init__.py rename to providers/tests/google/cloud/operators/__init__.py diff --git a/tests/providers/dbt/cloud/hooks/__init__.py b/providers/tests/google/cloud/operators/source/__init__.py similarity index 100% rename from tests/providers/dbt/cloud/hooks/__init__.py rename to providers/tests/google/cloud/operators/source/__init__.py diff --git a/tests/providers/dbt/cloud/operators/__init__.py b/providers/tests/google/cloud/operators/source/source_prefix/__init__.py similarity index 100% rename from tests/providers/dbt/cloud/operators/__init__.py rename to providers/tests/google/cloud/operators/source/source_prefix/__init__.py diff --git a/tests/providers/google/cloud/operators/test_automl.py b/providers/tests/google/cloud/operators/test_automl.py similarity index 100% rename from tests/providers/google/cloud/operators/test_automl.py rename to providers/tests/google/cloud/operators/test_automl.py diff --git a/tests/providers/google/cloud/operators/test_bigquery.py b/providers/tests/google/cloud/operators/test_bigquery.py similarity index 99% rename from tests/providers/google/cloud/operators/test_bigquery.py rename to providers/tests/google/cloud/operators/test_bigquery.py index ab9e8e6ac019f..3836d737662fc 100644 --- a/tests/providers/google/cloud/operators/test_bigquery.py +++ b/providers/tests/google/cloud/operators/test_bigquery.py @@ -18,6 +18,7 @@ from __future__ import annotations import json +import os from contextlib import suppress from unittest import mock from unittest.mock import ANY, MagicMock @@ -73,7 +74,13 @@ ) from airflow.serialization.serialized_objects import SerializedDAG from airflow.utils.timezone import datetime -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags, clear_db_xcom + +from dev.tests_common.test_utils.db import ( + clear_db_dags, + clear_db_runs, + clear_db_serialized_dags, + clear_db_xcom, +) pytestmark = pytest.mark.db_test @@ -1840,7 +1847,7 @@ def test_execute_openlineage_events(self, mock_hook): assert result == real_job_id - with open(file="tests/providers/google/cloud/utils/job_details.json") as f: + with open(os.path.dirname(__file__) + "/../utils/job_details.json") as f: job_details = json.loads(f.read()) mock_hook.return_value.get_client.return_value.get_job.return_value._properties = job_details mock_hook.return_value.get_client.return_value.get_table.side_effect = Exception() diff --git a/tests/providers/google/cloud/operators/test_bigquery_dts.py b/providers/tests/google/cloud/operators/test_bigquery_dts.py similarity index 98% rename from tests/providers/google/cloud/operators/test_bigquery_dts.py rename to providers/tests/google/cloud/operators/test_bigquery_dts.py index f44479bbce9e4..f50c42805b49d 100644 --- a/tests/providers/google/cloud/operators/test_bigquery_dts.py +++ b/providers/tests/google/cloud/operators/test_bigquery_dts.py @@ -27,7 +27,8 @@ BigQueryDataTransferServiceStartTransferRunsOperator, BigQueryDeleteDataTransferConfigOperator, ) -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS PROJECT_ID = "id" diff --git a/tests/providers/google/cloud/operators/test_bigtable.py 
b/providers/tests/google/cloud/operators/test_bigtable.py similarity index 100% rename from tests/providers/google/cloud/operators/test_bigtable.py rename to providers/tests/google/cloud/operators/test_bigtable.py diff --git a/tests/providers/google/cloud/operators/test_cloud_base.py b/providers/tests/google/cloud/operators/test_cloud_base.py similarity index 100% rename from tests/providers/google/cloud/operators/test_cloud_base.py rename to providers/tests/google/cloud/operators/test_cloud_base.py diff --git a/tests/providers/google/cloud/operators/test_cloud_batch.py b/providers/tests/google/cloud/operators/test_cloud_batch.py similarity index 100% rename from tests/providers/google/cloud/operators/test_cloud_batch.py rename to providers/tests/google/cloud/operators/test_cloud_batch.py diff --git a/tests/providers/google/cloud/operators/test_cloud_build.py b/providers/tests/google/cloud/operators/test_cloud_build.py similarity index 100% rename from tests/providers/google/cloud/operators/test_cloud_build.py rename to providers/tests/google/cloud/operators/test_cloud_build.py diff --git a/tests/providers/google/cloud/operators/test_cloud_composer.py b/providers/tests/google/cloud/operators/test_cloud_composer.py similarity index 100% rename from tests/providers/google/cloud/operators/test_cloud_composer.py rename to providers/tests/google/cloud/operators/test_cloud_composer.py diff --git a/tests/providers/google/cloud/operators/test_cloud_memorystore.py b/providers/tests/google/cloud/operators/test_cloud_memorystore.py similarity index 100% rename from tests/providers/google/cloud/operators/test_cloud_memorystore.py rename to providers/tests/google/cloud/operators/test_cloud_memorystore.py diff --git a/tests/providers/google/cloud/operators/test_cloud_run.py b/providers/tests/google/cloud/operators/test_cloud_run.py similarity index 100% rename from tests/providers/google/cloud/operators/test_cloud_run.py rename to providers/tests/google/cloud/operators/test_cloud_run.py diff --git a/tests/providers/google/cloud/operators/test_cloud_sql.py b/providers/tests/google/cloud/operators/test_cloud_sql.py similarity index 100% rename from tests/providers/google/cloud/operators/test_cloud_sql.py rename to providers/tests/google/cloud/operators/test_cloud_sql.py diff --git a/tests/providers/google/cloud/operators/test_cloud_storage_transfer_service.py b/providers/tests/google/cloud/operators/test_cloud_storage_transfer_service.py similarity index 100% rename from tests/providers/google/cloud/operators/test_cloud_storage_transfer_service.py rename to providers/tests/google/cloud/operators/test_cloud_storage_transfer_service.py diff --git a/tests/providers/google/cloud/operators/test_compute.py b/providers/tests/google/cloud/operators/test_compute.py similarity index 100% rename from tests/providers/google/cloud/operators/test_compute.py rename to providers/tests/google/cloud/operators/test_compute.py diff --git a/tests/providers/google/cloud/operators/test_datacatalog.py b/providers/tests/google/cloud/operators/test_datacatalog.py similarity index 100% rename from tests/providers/google/cloud/operators/test_datacatalog.py rename to providers/tests/google/cloud/operators/test_datacatalog.py diff --git a/tests/providers/google/cloud/operators/test_dataflow.py b/providers/tests/google/cloud/operators/test_dataflow.py similarity index 100% rename from tests/providers/google/cloud/operators/test_dataflow.py rename to providers/tests/google/cloud/operators/test_dataflow.py diff --git 
a/tests/providers/google/cloud/operators/test_dataform.py b/providers/tests/google/cloud/operators/test_dataform.py similarity index 100% rename from tests/providers/google/cloud/operators/test_dataform.py rename to providers/tests/google/cloud/operators/test_dataform.py diff --git a/tests/providers/google/cloud/operators/test_datafusion.py b/providers/tests/google/cloud/operators/test_datafusion.py similarity index 100% rename from tests/providers/google/cloud/operators/test_datafusion.py rename to providers/tests/google/cloud/operators/test_datafusion.py diff --git a/tests/providers/google/cloud/operators/test_datapipeline.py b/providers/tests/google/cloud/operators/test_datapipeline.py similarity index 100% rename from tests/providers/google/cloud/operators/test_datapipeline.py rename to providers/tests/google/cloud/operators/test_datapipeline.py diff --git a/tests/providers/google/cloud/operators/test_dataplex.py b/providers/tests/google/cloud/operators/test_dataplex.py similarity index 100% rename from tests/providers/google/cloud/operators/test_dataplex.py rename to providers/tests/google/cloud/operators/test_dataplex.py diff --git a/tests/providers/google/cloud/operators/test_dataprep.py b/providers/tests/google/cloud/operators/test_dataprep.py similarity index 100% rename from tests/providers/google/cloud/operators/test_dataprep.py rename to providers/tests/google/cloud/operators/test_dataprep.py diff --git a/tests/providers/google/cloud/operators/test_dataprep_system.py b/providers/tests/google/cloud/operators/test_dataprep_system.py similarity index 91% rename from tests/providers/google/cloud/operators/test_dataprep_system.py rename to providers/tests/google/cloud/operators/test_dataprep_system.py index fcba01fe599cc..96f47fa3e365d 100644 --- a/tests/providers/google/cloud/operators/test_dataprep_system.py +++ b/providers/tests/google/cloud/operators/test_dataprep_system.py @@ -24,8 +24,9 @@ from airflow.models import Connection from airflow.utils.session import create_session -from tests.test_utils.db import clear_db_connections -from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest + +from dev.tests_common.test_utils.db import clear_db_connections +from dev.tests_common.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest TOKEN = os.environ.get("DATAPREP_TOKEN") EXTRA = {"token": TOKEN} diff --git a/tests/providers/google/cloud/operators/test_dataproc.py b/providers/tests/google/cloud/operators/test_dataproc.py similarity index 99% rename from tests/providers/google/cloud/operators/test_dataproc.py rename to providers/tests/google/cloud/operators/test_dataproc.py index 58b38125ee1d3..cf4bffa3a0929 100644 --- a/tests/providers/google/cloud/operators/test_dataproc.py +++ b/providers/tests/google/cloud/operators/test_dataproc.py @@ -79,8 +79,9 @@ from airflow.providers.google.common.consts import GOOGLE_DEFAULT_DEFERRABLE_METHOD_NAME from airflow.serialization.serialized_objects import SerializedDAG from airflow.utils.timezone import datetime -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS, AIRFLOW_VERSION -from tests.test_utils.db import clear_db_runs, clear_db_xcom + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, AIRFLOW_VERSION +from dev.tests_common.test_utils.db import clear_db_runs, clear_db_xcom AIRFLOW_VERSION_LABEL = "v" + str(AIRFLOW_VERSION).replace(".", "-").replace("+", "-") diff --git a/tests/providers/google/cloud/operators/test_dataproc_metastore.py 
b/providers/tests/google/cloud/operators/test_dataproc_metastore.py similarity index 100% rename from tests/providers/google/cloud/operators/test_dataproc_metastore.py rename to providers/tests/google/cloud/operators/test_dataproc_metastore.py diff --git a/tests/providers/google/cloud/operators/test_datastore.py b/providers/tests/google/cloud/operators/test_datastore.py similarity index 100% rename from tests/providers/google/cloud/operators/test_datastore.py rename to providers/tests/google/cloud/operators/test_datastore.py diff --git a/tests/providers/google/cloud/operators/test_datastore_system.py b/providers/tests/google/cloud/operators/test_datastore_system.py similarity index 89% rename from tests/providers/google/cloud/operators/test_datastore_system.py rename to providers/tests/google/cloud/operators/test_datastore_system.py index 5835a46673db1..a98215a5317a3 100644 --- a/tests/providers/google/cloud/operators/test_datastore_system.py +++ b/providers/tests/google/cloud/operators/test_datastore_system.py @@ -21,8 +21,12 @@ import pytest -from tests.providers.google.cloud.utils.gcp_authenticator import GCP_DATASTORE_KEY -from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context +from dev.tests_common.test_utils.gcp_system_helpers import ( + CLOUD_DAG_FOLDER, + GoogleSystemTest, + provide_gcp_context, +) +from providers.tests.google.cloud.utils.gcp_authenticator import GCP_DATASTORE_KEY BUCKET = os.environ.get("GCP_DATASTORE_BUCKET", "datastore-system-test") diff --git a/tests/providers/google/cloud/operators/test_dlp.py b/providers/tests/google/cloud/operators/test_dlp.py similarity index 100% rename from tests/providers/google/cloud/operators/test_dlp.py rename to providers/tests/google/cloud/operators/test_dlp.py diff --git a/tests/providers/google/cloud/operators/test_functions.py b/providers/tests/google/cloud/operators/test_functions.py similarity index 100% rename from tests/providers/google/cloud/operators/test_functions.py rename to providers/tests/google/cloud/operators/test_functions.py diff --git a/tests/providers/google/cloud/operators/test_gcs.py b/providers/tests/google/cloud/operators/test_gcs.py similarity index 100% rename from tests/providers/google/cloud/operators/test_gcs.py rename to providers/tests/google/cloud/operators/test_gcs.py diff --git a/tests/providers/google/cloud/operators/test_kubernetes_engine.py b/providers/tests/google/cloud/operators/test_kubernetes_engine.py similarity index 100% rename from tests/providers/google/cloud/operators/test_kubernetes_engine.py rename to providers/tests/google/cloud/operators/test_kubernetes_engine.py diff --git a/tests/providers/google/cloud/operators/test_life_sciences.py b/providers/tests/google/cloud/operators/test_life_sciences.py similarity index 100% rename from tests/providers/google/cloud/operators/test_life_sciences.py rename to providers/tests/google/cloud/operators/test_life_sciences.py diff --git a/tests/providers/google/cloud/operators/test_looker.py b/providers/tests/google/cloud/operators/test_looker.py similarity index 98% rename from tests/providers/google/cloud/operators/test_looker.py rename to providers/tests/google/cloud/operators/test_looker.py index 5dfea013a2516..b368259c503e0 100644 --- a/tests/providers/google/cloud/operators/test_looker.py +++ b/providers/tests/google/cloud/operators/test_looker.py @@ -25,7 +25,8 @@ from airflow.models import DAG, DagBag from airflow.providers.google.cloud.operators.looker import LookerStartPdtBuildOperator from 
airflow.utils.timezone import datetime -from tests.test_utils.db import clear_db_runs, clear_db_xcom + +from dev.tests_common.test_utils.db import clear_db_runs, clear_db_xcom OPERATOR_PATH = "airflow.providers.google.cloud.operators.looker.{}" diff --git a/tests/providers/google/cloud/operators/test_mlengine.py b/providers/tests/google/cloud/operators/test_mlengine.py similarity index 100% rename from tests/providers/google/cloud/operators/test_mlengine.py rename to providers/tests/google/cloud/operators/test_mlengine.py diff --git a/tests/providers/google/cloud/operators/test_natural_language.py b/providers/tests/google/cloud/operators/test_natural_language.py similarity index 100% rename from tests/providers/google/cloud/operators/test_natural_language.py rename to providers/tests/google/cloud/operators/test_natural_language.py diff --git a/tests/providers/google/cloud/operators/test_pubsub.py b/providers/tests/google/cloud/operators/test_pubsub.py similarity index 100% rename from tests/providers/google/cloud/operators/test_pubsub.py rename to providers/tests/google/cloud/operators/test_pubsub.py diff --git a/tests/providers/google/cloud/operators/test_spanner.py b/providers/tests/google/cloud/operators/test_spanner.py similarity index 100% rename from tests/providers/google/cloud/operators/test_spanner.py rename to providers/tests/google/cloud/operators/test_spanner.py diff --git a/tests/providers/google/cloud/operators/test_speech_to_text.py b/providers/tests/google/cloud/operators/test_speech_to_text.py similarity index 100% rename from tests/providers/google/cloud/operators/test_speech_to_text.py rename to providers/tests/google/cloud/operators/test_speech_to_text.py diff --git a/tests/providers/google/cloud/operators/test_stackdriver.py b/providers/tests/google/cloud/operators/test_stackdriver.py similarity index 100% rename from tests/providers/google/cloud/operators/test_stackdriver.py rename to providers/tests/google/cloud/operators/test_stackdriver.py diff --git a/tests/providers/google/cloud/operators/test_tasks.py b/providers/tests/google/cloud/operators/test_tasks.py similarity index 100% rename from tests/providers/google/cloud/operators/test_tasks.py rename to providers/tests/google/cloud/operators/test_tasks.py diff --git a/tests/providers/google/cloud/operators/test_text_to_speech.py b/providers/tests/google/cloud/operators/test_text_to_speech.py similarity index 100% rename from tests/providers/google/cloud/operators/test_text_to_speech.py rename to providers/tests/google/cloud/operators/test_text_to_speech.py diff --git a/tests/providers/google/cloud/operators/test_translate.py b/providers/tests/google/cloud/operators/test_translate.py similarity index 100% rename from tests/providers/google/cloud/operators/test_translate.py rename to providers/tests/google/cloud/operators/test_translate.py diff --git a/tests/providers/google/cloud/operators/test_translate_speech.py b/providers/tests/google/cloud/operators/test_translate_speech.py similarity index 100% rename from tests/providers/google/cloud/operators/test_translate_speech.py rename to providers/tests/google/cloud/operators/test_translate_speech.py diff --git a/tests/providers/google/cloud/operators/test_vertex_ai.py b/providers/tests/google/cloud/operators/test_vertex_ai.py similarity index 99% rename from tests/providers/google/cloud/operators/test_vertex_ai.py rename to providers/tests/google/cloud/operators/test_vertex_ai.py index 25deb188aaa4d..f79542006e26b 100644 --- 
a/tests/providers/google/cloud/operators/test_vertex_ai.py +++ b/providers/tests/google/cloud/operators/test_vertex_ai.py @@ -2178,9 +2178,12 @@ def test_execute_deferrable(self, mock_hook, mock_link_persist): deferrable=True, ) context = {"ti": mock.MagicMock()} - with pytest.raises(TaskDeferred) as exception_info, pytest.warns( - AirflowProviderDeprecationWarning, - match=SYNC_DEPRECATION_WARNING.format("28.08.2024"), + with ( + pytest.raises(TaskDeferred) as exception_info, + pytest.warns( + AirflowProviderDeprecationWarning, + match=SYNC_DEPRECATION_WARNING.format("28.08.2024"), + ), ): op.execute(context=context) @@ -2587,9 +2590,12 @@ def test_deferrable_sync_error(self): parallel_trial_count=3, deferrable=True, ) - with pytest.raises(AirflowException), pytest.warns( - AirflowProviderDeprecationWarning, - match=SYNC_DEPRECATION_WARNING.format("01.09.2024"), + with ( + pytest.raises(AirflowException), + pytest.warns( + AirflowProviderDeprecationWarning, + match=SYNC_DEPRECATION_WARNING.format("01.09.2024"), + ), ): op.execute(context={"ti": mock.MagicMock()}) diff --git a/tests/providers/google/cloud/operators/test_video_intelligence.py b/providers/tests/google/cloud/operators/test_video_intelligence.py similarity index 100% rename from tests/providers/google/cloud/operators/test_video_intelligence.py rename to providers/tests/google/cloud/operators/test_video_intelligence.py diff --git a/tests/providers/google/cloud/operators/test_vision.py b/providers/tests/google/cloud/operators/test_vision.py similarity index 100% rename from tests/providers/google/cloud/operators/test_vision.py rename to providers/tests/google/cloud/operators/test_vision.py diff --git a/tests/providers/google/cloud/operators/test_workflows.py b/providers/tests/google/cloud/operators/test_workflows.py similarity index 100% rename from tests/providers/google/cloud/operators/test_workflows.py rename to providers/tests/google/cloud/operators/test_workflows.py diff --git a/tests/providers/dbt/cloud/sensors/__init__.py b/providers/tests/google/cloud/operators/vertex_ai/__init__.py similarity index 100% rename from tests/providers/dbt/cloud/sensors/__init__.py rename to providers/tests/google/cloud/operators/vertex_ai/__init__.py diff --git a/tests/providers/google/cloud/operators/vertex_ai/test_generative_model.py b/providers/tests/google/cloud/operators/vertex_ai/test_generative_model.py similarity index 100% rename from tests/providers/google/cloud/operators/vertex_ai/test_generative_model.py rename to providers/tests/google/cloud/operators/vertex_ai/test_generative_model.py diff --git a/tests/providers/dbt/cloud/test_data/__init__.py b/providers/tests/google/cloud/secrets/__init__.py similarity index 100% rename from tests/providers/dbt/cloud/test_data/__init__.py rename to providers/tests/google/cloud/secrets/__init__.py diff --git a/tests/providers/google/cloud/secrets/test_secret_manager.py b/providers/tests/google/cloud/secrets/test_secret_manager.py similarity index 100% rename from tests/providers/google/cloud/secrets/test_secret_manager.py rename to providers/tests/google/cloud/secrets/test_secret_manager.py diff --git a/tests/providers/google/cloud/secrets/test_secret_manager_system.py b/providers/tests/google/cloud/secrets/test_secret_manager_system.py similarity index 95% rename from tests/providers/google/cloud/secrets/test_secret_manager_system.py rename to providers/tests/google/cloud/secrets/test_secret_manager_system.py index 6f0e88282dc92..b9b8b6d0cac82 100644 --- 
a/tests/providers/google/cloud/secrets/test_secret_manager_system.py +++ b/providers/tests/google/cloud/secrets/test_secret_manager_system.py @@ -23,8 +23,8 @@ import pytest -from tests.providers.google.cloud.utils.gcp_authenticator import GCP_SECRET_MANAGER_KEY -from tests.test_utils.gcp_system_helpers import GoogleSystemTest, provide_gcp_context +from dev.tests_common.test_utils.gcp_system_helpers import GoogleSystemTest, provide_gcp_context +from providers.tests.google.cloud.utils.gcp_authenticator import GCP_SECRET_MANAGER_KEY BACKEND_IMPORT_PATH = "airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend" diff --git a/tests/providers/dbt/cloud/triggers/__init__.py b/providers/tests/google/cloud/sensors/__init__.py similarity index 100% rename from tests/providers/dbt/cloud/triggers/__init__.py rename to providers/tests/google/cloud/sensors/__init__.py diff --git a/tests/providers/google/cloud/sensors/test_bigquery.py b/providers/tests/google/cloud/sensors/test_bigquery.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_bigquery.py rename to providers/tests/google/cloud/sensors/test_bigquery.py diff --git a/tests/providers/google/cloud/sensors/test_bigquery_dts.py b/providers/tests/google/cloud/sensors/test_bigquery_dts.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_bigquery_dts.py rename to providers/tests/google/cloud/sensors/test_bigquery_dts.py diff --git a/tests/providers/google/cloud/sensors/test_bigtable.py b/providers/tests/google/cloud/sensors/test_bigtable.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_bigtable.py rename to providers/tests/google/cloud/sensors/test_bigtable.py diff --git a/tests/providers/google/cloud/sensors/test_cloud_composer.py b/providers/tests/google/cloud/sensors/test_cloud_composer.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_cloud_composer.py rename to providers/tests/google/cloud/sensors/test_cloud_composer.py diff --git a/tests/providers/google/cloud/sensors/test_cloud_storage_transfer_service.py b/providers/tests/google/cloud/sensors/test_cloud_storage_transfer_service.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_cloud_storage_transfer_service.py rename to providers/tests/google/cloud/sensors/test_cloud_storage_transfer_service.py diff --git a/tests/providers/google/cloud/sensors/test_dataflow.py b/providers/tests/google/cloud/sensors/test_dataflow.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_dataflow.py rename to providers/tests/google/cloud/sensors/test_dataflow.py diff --git a/tests/providers/google/cloud/sensors/test_datafusion.py b/providers/tests/google/cloud/sensors/test_datafusion.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_datafusion.py rename to providers/tests/google/cloud/sensors/test_datafusion.py diff --git a/tests/providers/google/cloud/sensors/test_dataplex.py b/providers/tests/google/cloud/sensors/test_dataplex.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_dataplex.py rename to providers/tests/google/cloud/sensors/test_dataplex.py diff --git a/tests/providers/google/cloud/sensors/test_dataprep.py b/providers/tests/google/cloud/sensors/test_dataprep.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_dataprep.py rename to providers/tests/google/cloud/sensors/test_dataprep.py diff --git 
a/tests/providers/google/cloud/sensors/test_dataproc.py b/providers/tests/google/cloud/sensors/test_dataproc.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_dataproc.py rename to providers/tests/google/cloud/sensors/test_dataproc.py diff --git a/tests/providers/google/cloud/sensors/test_dataproc_metastore.py b/providers/tests/google/cloud/sensors/test_dataproc_metastore.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_dataproc_metastore.py rename to providers/tests/google/cloud/sensors/test_dataproc_metastore.py diff --git a/tests/providers/google/cloud/sensors/test_gcs.py b/providers/tests/google/cloud/sensors/test_gcs.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_gcs.py rename to providers/tests/google/cloud/sensors/test_gcs.py diff --git a/tests/providers/google/cloud/sensors/test_looker.py b/providers/tests/google/cloud/sensors/test_looker.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_looker.py rename to providers/tests/google/cloud/sensors/test_looker.py diff --git a/tests/providers/google/cloud/sensors/test_pubsub.py b/providers/tests/google/cloud/sensors/test_pubsub.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_pubsub.py rename to providers/tests/google/cloud/sensors/test_pubsub.py diff --git a/tests/providers/google/cloud/sensors/test_tasks.py b/providers/tests/google/cloud/sensors/test_tasks.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_tasks.py rename to providers/tests/google/cloud/sensors/test_tasks.py diff --git a/tests/providers/google/cloud/sensors/test_workflows.py b/providers/tests/google/cloud/sensors/test_workflows.py similarity index 100% rename from tests/providers/google/cloud/sensors/test_workflows.py rename to providers/tests/google/cloud/sensors/test_workflows.py diff --git a/tests/providers/dbt/cloud/utils/__init__.py b/providers/tests/google/cloud/transfers/__init__.py similarity index 100% rename from tests/providers/dbt/cloud/utils/__init__.py rename to providers/tests/google/cloud/transfers/__init__.py diff --git a/tests/providers/google/cloud/transfers/test_adls_to_gcs.py b/providers/tests/google/cloud/transfers/test_adls_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_adls_to_gcs.py rename to providers/tests/google/cloud/transfers/test_adls_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_azure_blob_to_gcs.py b/providers/tests/google/cloud/transfers/test_azure_blob_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_azure_blob_to_gcs.py rename to providers/tests/google/cloud/transfers/test_azure_blob_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs.py b/providers/tests/google/cloud/transfers/test_azure_fileshare_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs.py rename to providers/tests/google/cloud/transfers/test_azure_fileshare_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py b/providers/tests/google/cloud/transfers/test_bigquery_to_bigquery.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py rename to providers/tests/google/cloud/transfers/test_bigquery_to_bigquery.py diff --git a/tests/providers/google/cloud/transfers/test_bigquery_to_gcs.py 
b/providers/tests/google/cloud/transfers/test_bigquery_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_bigquery_to_gcs.py rename to providers/tests/google/cloud/transfers/test_bigquery_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_bigquery_to_mssql.py b/providers/tests/google/cloud/transfers/test_bigquery_to_mssql.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_bigquery_to_mssql.py rename to providers/tests/google/cloud/transfers/test_bigquery_to_mssql.py diff --git a/tests/providers/google/cloud/transfers/test_bigquery_to_mysql.py b/providers/tests/google/cloud/transfers/test_bigquery_to_mysql.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_bigquery_to_mysql.py rename to providers/tests/google/cloud/transfers/test_bigquery_to_mysql.py diff --git a/tests/providers/google/cloud/transfers/test_bigquery_to_postgres.py b/providers/tests/google/cloud/transfers/test_bigquery_to_postgres.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_bigquery_to_postgres.py rename to providers/tests/google/cloud/transfers/test_bigquery_to_postgres.py diff --git a/tests/providers/google/cloud/transfers/test_calendar_to_gcs.py b/providers/tests/google/cloud/transfers/test_calendar_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_calendar_to_gcs.py rename to providers/tests/google/cloud/transfers/test_calendar_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_cassandra_to_gcs.py b/providers/tests/google/cloud/transfers/test_cassandra_to_gcs.py similarity index 91% rename from tests/providers/google/cloud/transfers/test_cassandra_to_gcs.py rename to providers/tests/google/cloud/transfers/test_cassandra_to_gcs.py index 75e3cbac1789b..caa9d5ea76a1a 100644 --- a/tests/providers/google/cloud/transfers/test_cassandra_to_gcs.py +++ b/providers/tests/google/cloud/transfers/test_cassandra_to_gcs.py @@ -46,13 +46,17 @@ def test_execute(self): except cassandra.DependencyException: pytest.skip("cassandra-driver not installed with libev support. 
Skipping test.") - with mock.patch( - "airflow.providers.google.cloud.transfers.cassandra_to_gcs.NamedTemporaryFile" - ) as mock_tempfile, mock.patch( - "airflow.providers.google.cloud.transfers.cassandra_to_gcs.GCSHook.upload" - ) as mock_upload, mock.patch( - "airflow.providers.google.cloud.transfers.cassandra_to_gcs.CassandraHook" - ) as mock_hook: + with ( + mock.patch( + "airflow.providers.google.cloud.transfers.cassandra_to_gcs.NamedTemporaryFile" + ) as mock_tempfile, + mock.patch( + "airflow.providers.google.cloud.transfers.cassandra_to_gcs.GCSHook.upload" + ) as mock_upload, + mock.patch( + "airflow.providers.google.cloud.transfers.cassandra_to_gcs.CassandraHook" + ) as mock_hook, + ): mock_tempfile.return_value.name = TMP_FILE_NAME operator = CassandraToGCSOperator( task_id=TASK_ID, diff --git a/tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs.py b/providers/tests/google/cloud/transfers/test_facebook_ads_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs.py rename to providers/tests/google/cloud/transfers/test_facebook_ads_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs_system.py b/providers/tests/google/cloud/transfers/test_facebook_ads_to_gcs_system.py similarity index 93% rename from tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs_system.py rename to providers/tests/google/cloud/transfers/test_facebook_ads_to_gcs_system.py index 85c6f69ae59ea..ba24a0c34da25 100644 --- a/tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs_system.py +++ b/providers/tests/google/cloud/transfers/test_facebook_ads_to_gcs_system.py @@ -26,8 +26,13 @@ from airflow.exceptions import AirflowException from airflow.models import Connection from airflow.utils.process_utils import patch_environ -from tests.providers.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY -from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context + +from dev.tests_common.test_utils.gcp_system_helpers import ( + CLOUD_DAG_FOLDER, + GoogleSystemTest, + provide_gcp_context, +) +from providers.tests.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY CREDENTIALS_DIR = os.environ.get("CREDENTIALS_DIR", "/files/airflow-breeze-config/keys") FACEBOOK_KEY = "facebook.json" diff --git a/tests/providers/google/cloud/transfers/test_gcs_to_bigquery.py b/providers/tests/google/cloud/transfers/test_gcs_to_bigquery.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_gcs_to_bigquery.py rename to providers/tests/google/cloud/transfers/test_gcs_to_bigquery.py diff --git a/tests/providers/google/cloud/transfers/test_gcs_to_gcs.py b/providers/tests/google/cloud/transfers/test_gcs_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_gcs_to_gcs.py rename to providers/tests/google/cloud/transfers/test_gcs_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_gcs_to_local.py b/providers/tests/google/cloud/transfers/test_gcs_to_local.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_gcs_to_local.py rename to providers/tests/google/cloud/transfers/test_gcs_to_local.py diff --git a/tests/providers/google/cloud/transfers/test_gcs_to_sftp.py b/providers/tests/google/cloud/transfers/test_gcs_to_sftp.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_gcs_to_sftp.py rename to 
providers/tests/google/cloud/transfers/test_gcs_to_sftp.py diff --git a/tests/providers/google/cloud/transfers/test_gdrive_to_gcs.py b/providers/tests/google/cloud/transfers/test_gdrive_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_gdrive_to_gcs.py rename to providers/tests/google/cloud/transfers/test_gdrive_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_gdrive_to_local.py b/providers/tests/google/cloud/transfers/test_gdrive_to_local.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_gdrive_to_local.py rename to providers/tests/google/cloud/transfers/test_gdrive_to_local.py diff --git a/tests/providers/google/cloud/transfers/test_local_to_gcs.py b/providers/tests/google/cloud/transfers/test_local_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_local_to_gcs.py rename to providers/tests/google/cloud/transfers/test_local_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_mssql_to_gcs.py b/providers/tests/google/cloud/transfers/test_mssql_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_mssql_to_gcs.py rename to providers/tests/google/cloud/transfers/test_mssql_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py b/providers/tests/google/cloud/transfers/test_mysql_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_mysql_to_gcs.py rename to providers/tests/google/cloud/transfers/test_mysql_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_oracle_to_gcs.py b/providers/tests/google/cloud/transfers/test_oracle_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_oracle_to_gcs.py rename to providers/tests/google/cloud/transfers/test_oracle_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_postgres_to_gcs.py b/providers/tests/google/cloud/transfers/test_postgres_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_postgres_to_gcs.py rename to providers/tests/google/cloud/transfers/test_postgres_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_s3_to_gcs.py b/providers/tests/google/cloud/transfers/test_s3_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_s3_to_gcs.py rename to providers/tests/google/cloud/transfers/test_s3_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_salesforce_to_gcs.py b/providers/tests/google/cloud/transfers/test_salesforce_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_salesforce_to_gcs.py rename to providers/tests/google/cloud/transfers/test_salesforce_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_salesforce_to_gcs_system.py b/providers/tests/google/cloud/transfers/test_salesforce_to_gcs_system.py similarity index 84% rename from tests/providers/google/cloud/transfers/test_salesforce_to_gcs_system.py rename to providers/tests/google/cloud/transfers/test_salesforce_to_gcs_system.py index c2eee9bd09110..afd0856fad244 100644 --- a/tests/providers/google/cloud/transfers/test_salesforce_to_gcs_system.py +++ b/providers/tests/google/cloud/transfers/test_salesforce_to_gcs_system.py @@ -20,9 +20,13 @@ import pytest -from tests.providers.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY -from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context 
-from tests.test_utils.salesforce_system_helpers import provide_salesforce_connection +from dev.tests_common.test_utils.gcp_system_helpers import ( + CLOUD_DAG_FOLDER, + GoogleSystemTest, + provide_gcp_context, +) +from dev.tests_common.test_utils.salesforce_system_helpers import provide_salesforce_connection +from providers.tests.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY CREDENTIALS_DIR = os.environ.get("CREDENTIALS_DIR", "/files/airflow-breeze-config/keys") SALESFORCE_KEY = "salesforce.json" diff --git a/tests/providers/google/cloud/transfers/test_sftp_to_gcs.py b/providers/tests/google/cloud/transfers/test_sftp_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_sftp_to_gcs.py rename to providers/tests/google/cloud/transfers/test_sftp_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_sheets_to_gcs.py b/providers/tests/google/cloud/transfers/test_sheets_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_sheets_to_gcs.py rename to providers/tests/google/cloud/transfers/test_sheets_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_sql_to_gcs.py b/providers/tests/google/cloud/transfers/test_sql_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_sql_to_gcs.py rename to providers/tests/google/cloud/transfers/test_sql_to_gcs.py diff --git a/tests/providers/google/cloud/transfers/test_trino_to_gcs.py b/providers/tests/google/cloud/transfers/test_trino_to_gcs.py similarity index 100% rename from tests/providers/google/cloud/transfers/test_trino_to_gcs.py rename to providers/tests/google/cloud/transfers/test_trino_to_gcs.py diff --git a/tests/providers/discord/notifications/__init__.py b/providers/tests/google/cloud/triggers/__init__.py similarity index 100% rename from tests/providers/discord/notifications/__init__.py rename to providers/tests/google/cloud/triggers/__init__.py diff --git a/tests/providers/google/cloud/triggers/test_bigquery.py b/providers/tests/google/cloud/triggers/test_bigquery.py similarity index 100% rename from tests/providers/google/cloud/triggers/test_bigquery.py rename to providers/tests/google/cloud/triggers/test_bigquery.py diff --git a/tests/providers/google/cloud/triggers/test_bigquery_dts.py b/providers/tests/google/cloud/triggers/test_bigquery_dts.py similarity index 100% rename from tests/providers/google/cloud/triggers/test_bigquery_dts.py rename to providers/tests/google/cloud/triggers/test_bigquery_dts.py diff --git a/tests/providers/google/cloud/triggers/test_cloud_batch.py b/providers/tests/google/cloud/triggers/test_cloud_batch.py similarity index 100% rename from tests/providers/google/cloud/triggers/test_cloud_batch.py rename to providers/tests/google/cloud/triggers/test_cloud_batch.py diff --git a/tests/providers/google/cloud/triggers/test_cloud_build.py b/providers/tests/google/cloud/triggers/test_cloud_build.py similarity index 100% rename from tests/providers/google/cloud/triggers/test_cloud_build.py rename to providers/tests/google/cloud/triggers/test_cloud_build.py diff --git a/tests/providers/google/cloud/triggers/test_cloud_composer.py b/providers/tests/google/cloud/triggers/test_cloud_composer.py similarity index 100% rename from tests/providers/google/cloud/triggers/test_cloud_composer.py rename to providers/tests/google/cloud/triggers/test_cloud_composer.py diff --git a/tests/providers/google/cloud/triggers/test_cloud_run.py b/providers/tests/google/cloud/triggers/test_cloud_run.py 
similarity index 100%
rename from tests/providers/google/cloud/triggers/test_cloud_run.py
rename to providers/tests/google/cloud/triggers/test_cloud_run.py
diff --git a/tests/providers/google/cloud/triggers/test_cloud_sql.py b/providers/tests/google/cloud/triggers/test_cloud_sql.py
similarity index 100%
rename from tests/providers/google/cloud/triggers/test_cloud_sql.py
rename to providers/tests/google/cloud/triggers/test_cloud_sql.py
diff --git a/tests/providers/google/cloud/triggers/test_cloud_storage_transfer_service.py b/providers/tests/google/cloud/triggers/test_cloud_storage_transfer_service.py
similarity index 100%
rename from tests/providers/google/cloud/triggers/test_cloud_storage_transfer_service.py
rename to providers/tests/google/cloud/triggers/test_cloud_storage_transfer_service.py
diff --git a/tests/providers/google/cloud/triggers/test_dataflow.py b/providers/tests/google/cloud/triggers/test_dataflow.py
similarity index 100%
rename from tests/providers/google/cloud/triggers/test_dataflow.py
rename to providers/tests/google/cloud/triggers/test_dataflow.py
diff --git a/tests/providers/google/cloud/triggers/test_datafusion.py b/providers/tests/google/cloud/triggers/test_datafusion.py
similarity index 100%
rename from tests/providers/google/cloud/triggers/test_datafusion.py
rename to providers/tests/google/cloud/triggers/test_datafusion.py
diff --git a/tests/providers/google/cloud/triggers/test_dataplex.py b/providers/tests/google/cloud/triggers/test_dataplex.py
similarity index 100%
rename from tests/providers/google/cloud/triggers/test_dataplex.py
rename to providers/tests/google/cloud/triggers/test_dataplex.py
diff --git a/tests/providers/google/cloud/triggers/test_dataproc.py b/providers/tests/google/cloud/triggers/test_dataproc.py
similarity index 100%
rename from tests/providers/google/cloud/triggers/test_dataproc.py
rename to providers/tests/google/cloud/triggers/test_dataproc.py
diff --git a/tests/providers/google/cloud/triggers/test_gcs.py b/providers/tests/google/cloud/triggers/test_gcs.py
similarity index 100%
rename from tests/providers/google/cloud/triggers/test_gcs.py
rename to providers/tests/google/cloud/triggers/test_gcs.py
diff --git a/tests/providers/google/cloud/triggers/test_kubernetes_engine.py b/providers/tests/google/cloud/triggers/test_kubernetes_engine.py
similarity index 100%
rename from tests/providers/google/cloud/triggers/test_kubernetes_engine.py
rename to providers/tests/google/cloud/triggers/test_kubernetes_engine.py
diff --git a/tests/providers/google/cloud/triggers/test_mlengine.py b/providers/tests/google/cloud/triggers/test_mlengine.py
similarity index 100%
rename from tests/providers/google/cloud/triggers/test_mlengine.py
rename to providers/tests/google/cloud/triggers/test_mlengine.py
diff --git a/tests/providers/google/cloud/triggers/test_pubsub.py b/providers/tests/google/cloud/triggers/test_pubsub.py
similarity index 100%
rename from tests/providers/google/cloud/triggers/test_pubsub.py
rename to providers/tests/google/cloud/triggers/test_pubsub.py
diff --git a/tests/providers/google/cloud/triggers/test_vertex_ai.py b/providers/tests/google/cloud/triggers/test_vertex_ai.py
similarity index 99%
rename from tests/providers/google/cloud/triggers/test_vertex_ai.py
rename to providers/tests/google/cloud/triggers/test_vertex_ai.py
index 946ebf49487fb..e2e588a2bfdd9 100644
--- a/tests/providers/google/cloud/triggers/test_vertex_ai.py
+++ b/providers/tests/google/cloud/triggers/test_vertex_ai.py
@@ -46,7 +46,8 @@
     RunPipelineJobTrigger,
 )
 from airflow.triggers.base import TriggerEvent
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 TEST_CONN_ID = "test_connection"
 TEST_PROJECT_ID = "test_propject_id"
diff --git a/tests/providers/ftp/hooks/__init__.py b/providers/tests/google/cloud/utils/__init__.py
similarity index 100%
rename from tests/providers/ftp/hooks/__init__.py
rename to providers/tests/google/cloud/utils/__init__.py
diff --git a/tests/providers/google/cloud/utils/airflow_util.py b/providers/tests/google/cloud/utils/airflow_util.py
similarity index 100%
rename from tests/providers/google/cloud/utils/airflow_util.py
rename to providers/tests/google/cloud/utils/airflow_util.py
diff --git a/tests/providers/google/cloud/utils/base_gcp_mock.py b/providers/tests/google/cloud/utils/base_gcp_mock.py
similarity index 100%
rename from tests/providers/google/cloud/utils/base_gcp_mock.py
rename to providers/tests/google/cloud/utils/base_gcp_mock.py
diff --git a/tests/providers/google/cloud/utils/gcp_authenticator.py b/providers/tests/google/cloud/utils/gcp_authenticator.py
similarity index 98%
rename from tests/providers/google/cloud/utils/gcp_authenticator.py
rename to providers/tests/google/cloud/utils/gcp_authenticator.py
index 7c95e57dc328f..6bb11b260644c 100644
--- a/tests/providers/google/cloud/utils/gcp_authenticator.py
+++ b/providers/tests/google/cloud/utils/gcp_authenticator.py
@@ -25,9 +25,10 @@
 from airflow.exceptions import AirflowException
 from airflow.models import Connection
+from dev.tests_common.test_utils import AIRFLOW_MAIN_FOLDER
+from dev.tests_common.test_utils.logging_command_executor import CommandExecutor
+
 # Please keep these variables in alphabetical order.
-from tests.test_utils import AIRFLOW_MAIN_FOLDER
-from tests.test_utils.logging_command_executor import CommandExecutor
 
 GCP_AI_KEY = "gcp_ai.json"
 GCP_BIGQUERY_KEY = "gcp_bigquery.json"
diff --git a/tests/providers/google/cloud/utils/job_details.json b/providers/tests/google/cloud/utils/job_details.json
similarity index 100%
rename from tests/providers/google/cloud/utils/job_details.json
rename to providers/tests/google/cloud/utils/job_details.json
diff --git a/tests/providers/google/cloud/utils/out_table_details.json b/providers/tests/google/cloud/utils/out_table_details.json
similarity index 100%
rename from tests/providers/google/cloud/utils/out_table_details.json
rename to providers/tests/google/cloud/utils/out_table_details.json
diff --git a/tests/providers/google/cloud/utils/script_job_details.json b/providers/tests/google/cloud/utils/script_job_details.json
similarity index 100%
rename from tests/providers/google/cloud/utils/script_job_details.json
rename to providers/tests/google/cloud/utils/script_job_details.json
diff --git a/tests/providers/google/cloud/utils/table_details.json b/providers/tests/google/cloud/utils/table_details.json
similarity index 100%
rename from tests/providers/google/cloud/utils/table_details.json
rename to providers/tests/google/cloud/utils/table_details.json
diff --git a/tests/providers/google/cloud/utils/test_credentials_provider.py b/providers/tests/google/cloud/utils/test_credentials_provider.py
similarity index 98%
rename from tests/providers/google/cloud/utils/test_credentials_provider.py
rename to providers/tests/google/cloud/utils/test_credentials_provider.py
index 3cab06cee8a46..2e6873775745a 100644
--- a/tests/providers/google/cloud/utils/test_credentials_provider.py
+++ b/providers/tests/google/cloud/utils/test_credentials_provider.py
@@ -319,9 +319,10 @@ def test_get_credentials_and_project_id_with_service_account_info(
 
     @mock.patch("google.auth.load_credentials_from_file", return_value=("CREDENTIALS", "PROJECT_ID"))
     def test_get_credentials_using_credential_config_file(self, mock_load_credentials_from_file, caplog):
-        with caplog.at_level(
-            level=logging.DEBUG, logger=CRED_PROVIDER_LOGGER_NAME
-        ), NamedTemporaryFile() as temp_file:
+        with (
+            caplog.at_level(level=logging.DEBUG, logger=CRED_PROVIDER_LOGGER_NAME),
+            NamedTemporaryFile() as temp_file,
+        ):
             caplog.clear()
             result = get_credentials_and_project_id(credential_config_file=temp_file.name)
             mock_load_credentials_from_file.assert_called_once_with(temp_file.name, scopes=None)
@@ -350,8 +351,9 @@ def test_get_credentials_using_credential_config_string(self, mock_load_credenti
 
     def test_get_credentials_using_credential_config_invalid_string(self, caplog):
         caplog.clear()
-        with pytest.raises(DefaultCredentialsError), caplog.at_level(
-            level=logging.DEBUG, logger=CRED_PROVIDER_LOGGER_NAME
+        with (
+            pytest.raises(DefaultCredentialsError),
+            caplog.at_level(level=logging.DEBUG, logger=CRED_PROVIDER_LOGGER_NAME),
         ):
             get_credentials_and_project_id(credential_config_file="invalid json}}}}")
         assert "Getting connection using credential configuration string." in caplog.messages
diff --git a/tests/providers/google/cloud/utils/test_datafusion.py b/providers/tests/google/cloud/utils/test_datafusion.py
similarity index 100%
rename from tests/providers/google/cloud/utils/test_datafusion.py
rename to providers/tests/google/cloud/utils/test_datafusion.py
diff --git a/tests/providers/google/cloud/utils/test_dataproc.py b/providers/tests/google/cloud/utils/test_dataproc.py
similarity index 100%
rename from tests/providers/google/cloud/utils/test_dataproc.py
rename to providers/tests/google/cloud/utils/test_dataproc.py
diff --git a/tests/providers/google/cloud/utils/test_external_token_supplier.py b/providers/tests/google/cloud/utils/test_external_token_supplier.py
similarity index 100%
rename from tests/providers/google/cloud/utils/test_external_token_supplier.py
rename to providers/tests/google/cloud/utils/test_external_token_supplier.py
diff --git a/tests/providers/google/cloud/utils/test_field_sanitizer.py b/providers/tests/google/cloud/utils/test_field_sanitizer.py
similarity index 100%
rename from tests/providers/google/cloud/utils/test_field_sanitizer.py
rename to providers/tests/google/cloud/utils/test_field_sanitizer.py
diff --git a/tests/providers/google/cloud/utils/test_field_validator.py b/providers/tests/google/cloud/utils/test_field_validator.py
similarity index 100%
rename from tests/providers/google/cloud/utils/test_field_validator.py
rename to providers/tests/google/cloud/utils/test_field_validator.py
diff --git a/tests/providers/google/cloud/utils/test_helpers.py b/providers/tests/google/cloud/utils/test_helpers.py
similarity index 100%
rename from tests/providers/google/cloud/utils/test_helpers.py
rename to providers/tests/google/cloud/utils/test_helpers.py
diff --git a/tests/providers/google/cloud/utils/test_mlengine_operator_utils.py b/providers/tests/google/cloud/utils/test_mlengine_operator_utils.py
similarity index 100%
rename from tests/providers/google/cloud/utils/test_mlengine_operator_utils.py
rename to providers/tests/google/cloud/utils/test_mlengine_operator_utils.py
diff --git a/tests/providers/google/cloud/utils/test_mlengine_prediction_summary.py b/providers/tests/google/cloud/utils/test_mlengine_prediction_summary.py
similarity index 91%
rename from tests/providers/google/cloud/utils/test_mlengine_prediction_summary.py
rename to providers/tests/google/cloud/utils/test_mlengine_prediction_summary.py
index 7d28cf1e68413..984c4847d9424 100644
--- a/tests/providers/google/cloud/utils/test_mlengine_prediction_summary.py
+++ b/providers/tests/google/cloud/utils/test_mlengine_prediction_summary.py
@@ -87,13 +87,11 @@ def test_run_should_fail_if_enc_fn_is_not_callable(self):
         )
 
     def test_run_should_not_fail_with_valid_fn(self):
-        with mock.patch.object(
-            mlengine_prediction_summary.beam.pipeline, "PipelineOptions"
-        ) as pipeline_mock, mock.patch.object(
-            mlengine_prediction_summary.beam, "Pipeline"
-        ) as pipeline_obj_mock, mock.patch.object(
-            mlengine_prediction_summary.beam.io, "ReadFromText"
-        ) as io_mock:
+        with (
+            mock.patch.object(mlengine_prediction_summary.beam.pipeline, "PipelineOptions") as pipeline_mock,
+            mock.patch.object(mlengine_prediction_summary.beam, "Pipeline") as pipeline_obj_mock,
+            mock.patch.object(mlengine_prediction_summary.beam.io, "ReadFromText") as io_mock,
+        ):
 
             def metric_function():
                 return 1
diff --git a/tests/providers/docker/decorators/__init__.py b/providers/tests/google/common/__init__.py
similarity index 100%
rename from tests/providers/docker/decorators/__init__.py
rename to providers/tests/google/common/__init__.py
diff --git a/tests/providers/elasticsearch/log/__init__.py b/providers/tests/google/common/auth_backend/__init__.py
similarity index 100%
rename from tests/providers/elasticsearch/log/__init__.py
rename to providers/tests/google/common/auth_backend/__init__.py
diff --git a/tests/providers/google/common/auth_backend/test_google_openid.py b/providers/tests/google/common/auth_backend/test_google_openid.py
similarity index 95%
rename from tests/providers/google/common/auth_backend/test_google_openid.py
rename to providers/tests/google/common/auth_backend/test_google_openid.py
index 260ae0d6fb5e1..67b0ff2003d22 100644
--- a/tests/providers/google/common/auth_backend/test_google_openid.py
+++ b/providers/tests/google/common/auth_backend/test_google_openid.py
@@ -22,10 +22,11 @@
 from google.auth.exceptions import GoogleAuthError
 
 from airflow.www.app import create_app
-from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS
-from tests.test_utils.config import conf_vars
-from tests.test_utils.db import clear_db_pools
-from tests.test_utils.decorators import dont_initialize_flask_app_submodules
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.db import clear_db_pools
+from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules
 
 
 @pytest.fixture(scope="module")
diff --git a/tests/providers/fab/auth_manager/api/auth/__init__.py b/providers/tests/google/common/hooks/__init__.py
similarity index 100%
rename from tests/providers/fab/auth_manager/api/auth/__init__.py
rename to providers/tests/google/common/hooks/__init__.py
diff --git a/tests/providers/google/common/hooks/test_base_google.py b/providers/tests/google/common/hooks/test_base_google.py
similarity index 99%
rename from tests/providers/google/common/hooks/test_base_google.py
rename to providers/tests/google/common/hooks/test_base_google.py
index 2b67b8844ecbc..a4317c7e8b9ef 100644
--- a/tests/providers/google/common/hooks/test_base_google.py
+++ b/providers/tests/google/common/hooks/test_base_google.py
@@ -38,7 +38,8 @@
 from airflow.providers.google.cloud.utils.credentials_provider import _DEFAULT_SCOPES
 from airflow.providers.google.common.hooks import base_google as hook
 from airflow.providers.google.common.hooks.base_google import GoogleBaseHook, is_refresh_credentials_exception
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 default_creds_available = True
 default_project = None
diff --git a/tests/providers/google/common/hooks/test_discovery_api.py b/providers/tests/google/common/hooks/test_discovery_api.py
similarity index 100%
rename from tests/providers/google/common/hooks/test_discovery_api.py
rename to providers/tests/google/common/hooks/test_discovery_api.py
diff --git a/tests/providers/google/common/test_deprecated.py b/providers/tests/google/common/test_deprecated.py
similarity index 100%
rename from tests/providers/google/common/test_deprecated.py
rename to providers/tests/google/common/test_deprecated.py
diff --git a/tests/providers/fab/auth_manager/api/auth/backend/__init__.py b/providers/tests/google/common/utils/__init__.py
similarity index 100%
rename from tests/providers/fab/auth_manager/api/auth/backend/__init__.py
rename to providers/tests/google/common/utils/__init__.py
diff --git a/tests/providers/google/common/utils/test_id_token_credentials.py b/providers/tests/google/common/utils/test_id_token_credentials.py
similarity index 100%
rename from tests/providers/google/common/utils/test_id_token_credentials.py
rename to providers/tests/google/common/utils/test_id_token_credentials.py
diff --git a/tests/providers/fab/auth_manager/api_endpoints/__init__.py b/providers/tests/google/firebase/__init__.py
similarity index 100%
rename from tests/providers/fab/auth_manager/api_endpoints/__init__.py
rename to providers/tests/google/firebase/__init__.py
diff --git a/tests/providers/fab/auth_manager/cli_commands/__init__.py b/providers/tests/google/firebase/hooks/__init__.py
similarity index 100%
rename from tests/providers/fab/auth_manager/cli_commands/__init__.py
rename to providers/tests/google/firebase/hooks/__init__.py
diff --git a/tests/providers/google/firebase/hooks/test_firestore.py b/providers/tests/google/firebase/hooks/test_firestore.py
similarity index 99%
rename from tests/providers/google/firebase/hooks/test_firestore.py
rename to providers/tests/google/firebase/hooks/test_firestore.py
index e6c17105a60fc..b7308c5f9b9ce 100644
--- a/tests/providers/google/firebase/hooks/test_firestore.py
+++ b/providers/tests/google/firebase/hooks/test_firestore.py
@@ -27,7 +27,8 @@
 from airflow.exceptions import AirflowException
 from airflow.providers.google.firebase.hooks.firestore import CloudFirestoreHook
-from tests.providers.google.cloud.utils.base_gcp_mock import (
+
+from providers.tests.google.cloud.utils.base_gcp_mock import (
     GCP_PROJECT_ID_HOOK_UNIT_TEST,
     mock_base_gcp_hook_default_project_id,
     mock_base_gcp_hook_no_default_project_id,
diff --git a/tests/providers/fab/auth_manager/decorators/__init__.py b/providers/tests/google/firebase/operators/__init__.py
similarity index 100%
rename from tests/providers/fab/auth_manager/decorators/__init__.py
rename to providers/tests/google/firebase/operators/__init__.py
diff --git a/tests/providers/google/firebase/operators/test_firestore.py b/providers/tests/google/firebase/operators/test_firestore.py
similarity index 100%
rename from tests/providers/google/firebase/operators/test_firestore.py
rename to providers/tests/google/firebase/operators/test_firestore.py
diff --git a/tests/providers/ftp/sensors/__init__.py b/providers/tests/google/leveldb/__init__.py
similarity index 100%
rename from tests/providers/ftp/sensors/__init__.py
rename to providers/tests/google/leveldb/__init__.py
diff --git a/tests/providers/google/cloud/utils/__init__.py b/providers/tests/google/leveldb/hooks/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/utils/__init__.py
rename to providers/tests/google/leveldb/hooks/__init__.py
diff --git a/tests/providers/google/leveldb/hooks/test_leveldb.py b/providers/tests/google/leveldb/hooks/test_leveldb.py
similarity index 100%
rename from tests/providers/google/leveldb/hooks/test_leveldb.py
rename to providers/tests/google/leveldb/hooks/test_leveldb.py
diff --git a/tests/providers/google/leveldb/__init__.py b/providers/tests/google/leveldb/operators/__init__.py
similarity index 100%
rename from tests/providers/google/leveldb/__init__.py
rename to providers/tests/google/leveldb/operators/__init__.py
diff --git a/tests/providers/google/leveldb/operators/test_leveldb.py b/providers/tests/google/leveldb/operators/test_leveldb.py
similarity index 100%
rename from tests/providers/google/leveldb/operators/test_leveldb.py
rename to providers/tests/google/leveldb/operators/test_leveldb.py
diff --git a/tests/providers/fab/auth_manager/security_manager/__init__.py b/providers/tests/google/marketing_platform/__init__.py
similarity index 100%
rename from tests/providers/fab/auth_manager/security_manager/__init__.py
rename to providers/tests/google/marketing_platform/__init__.py
diff --git a/tests/providers/facebook/__init__.py b/providers/tests/google/marketing_platform/hooks/__init__.py
similarity index 100%
rename from tests/providers/facebook/__init__.py
rename to providers/tests/google/marketing_platform/hooks/__init__.py
diff --git a/tests/providers/google/marketing_platform/hooks/test_analytics.py b/providers/tests/google/marketing_platform/hooks/test_analytics.py
similarity index 96%
rename from tests/providers/google/marketing_platform/hooks/test_analytics.py
rename to providers/tests/google/marketing_platform/hooks/test_analytics.py
index e88e8fe862a35..9944e2a9947ac 100644
--- a/tests/providers/google/marketing_platform/hooks/test_analytics.py
+++ b/providers/tests/google/marketing_platform/hooks/test_analytics.py
@@ -24,7 +24,8 @@
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.providers.google.marketing_platform.hooks.analytics import GoogleAnalyticsHook
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 WEB_PROPERTY_AD_WORDS_LINK_ID = "AAIIRRFFLLOOWW"
 WEB_PROPERTY_ID = "web_property_id"
@@ -38,10 +39,13 @@ class TestGoogleAnalyticsHook:
     def setup_method(self):
-        with mock.patch(
-            "airflow.providers.google.common.hooks.base_google.GoogleBaseHook.__init__",
-            new=mock_base_gcp_hook_default_project_id,
-        ), warnings.catch_warnings():
+        with (
+            mock.patch(
+                "airflow.providers.google.common.hooks.base_google.GoogleBaseHook.__init__",
+                new=mock_base_gcp_hook_default_project_id,
+            ),
+            warnings.catch_warnings(),
+        ):
             warnings.simplefilter("ignore", AirflowProviderDeprecationWarning)
             self.hook = GoogleAnalyticsHook(API_VERSION, GCP_CONN_ID)
diff --git a/tests/providers/google/marketing_platform/hooks/test_analytics_admin.py b/providers/tests/google/marketing_platform/hooks/test_analytics_admin.py
similarity index 99%
rename from tests/providers/google/marketing_platform/hooks/test_analytics_admin.py
rename to providers/tests/google/marketing_platform/hooks/test_analytics_admin.py
index 81a5210d499ab..9887dac4db240 100644
--- a/tests/providers/google/marketing_platform/hooks/test_analytics_admin.py
+++ b/providers/tests/google/marketing_platform/hooks/test_analytics_admin.py
@@ -20,7 +20,8 @@
 from unittest import mock
 
 from airflow.providers.google.marketing_platform.hooks.analytics_admin import GoogleAnalyticsAdminHook
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 GCP_CONN_ID = "test_gcp_conn_id"
 IMPERSONATION_CHAIN = ["ACCOUNT_1", "ACCOUNT_2", "ACCOUNT_3"]
diff --git a/tests/providers/google/marketing_platform/hooks/test_campaign_manager.py b/providers/tests/google/marketing_platform/hooks/test_campaign_manager.py
similarity index 99%
rename from tests/providers/google/marketing_platform/hooks/test_campaign_manager.py
rename to providers/tests/google/marketing_platform/hooks/test_campaign_manager.py
index 2e7cd821b3c25..0c16e1979592b 100644
--- a/tests/providers/google/marketing_platform/hooks/test_campaign_manager.py
+++ b/providers/tests/google/marketing_platform/hooks/test_campaign_manager.py
@@ -20,7 +20,8 @@
 from unittest import mock
 
 from airflow.providers.google.marketing_platform.hooks.campaign_manager import GoogleCampaignManagerHook
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 API_VERSION = "v4"
 GCP_CONN_ID = "google_cloud_default"
diff --git a/tests/providers/google/marketing_platform/hooks/test_display_video.py b/providers/tests/google/marketing_platform/hooks/test_display_video.py
similarity index 99%
rename from tests/providers/google/marketing_platform/hooks/test_display_video.py
rename to providers/tests/google/marketing_platform/hooks/test_display_video.py
index c6eae2aa8f9b1..2c476428e89f9 100644
--- a/tests/providers/google/marketing_platform/hooks/test_display_video.py
+++ b/providers/tests/google/marketing_platform/hooks/test_display_video.py
@@ -20,7 +20,8 @@
 from unittest import mock
 
 from airflow.providers.google.marketing_platform.hooks.display_video import GoogleDisplayVideo360Hook
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 API_VERSION = "v2"
 GCP_CONN_ID = "google_cloud_default"
diff --git a/tests/providers/google/marketing_platform/hooks/test_search_ads.py b/providers/tests/google/marketing_platform/hooks/test_search_ads.py
similarity index 99%
rename from tests/providers/google/marketing_platform/hooks/test_search_ads.py
rename to providers/tests/google/marketing_platform/hooks/test_search_ads.py
index 1fc08237f1458..c3b585364cea5 100644
--- a/tests/providers/google/marketing_platform/hooks/test_search_ads.py
+++ b/providers/tests/google/marketing_platform/hooks/test_search_ads.py
@@ -25,7 +25,8 @@
     GoogleSearchAdsHook,
     GoogleSearchAdsReportingHook,
 )
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 GCP_CONN_ID = "google_cloud_default"
 API_VERSION = "v0"
diff --git a/tests/providers/google/leveldb/hooks/__init__.py b/providers/tests/google/marketing_platform/links/__init__.py
similarity index 100%
rename from tests/providers/google/leveldb/hooks/__init__.py
rename to providers/tests/google/marketing_platform/links/__init__.py
diff --git a/tests/providers/google/marketing_platform/links/test_analytics_admin.py b/providers/tests/google/marketing_platform/links/test_analytics_admin.py
similarity index 100%
rename from tests/providers/google/marketing_platform/links/test_analytics_admin.py
rename to providers/tests/google/marketing_platform/links/test_analytics_admin.py
diff --git a/tests/providers/facebook/ads/__init__.py b/providers/tests/google/marketing_platform/operators/__init__.py
similarity index 100%
rename from tests/providers/facebook/ads/__init__.py
rename to providers/tests/google/marketing_platform/operators/__init__.py
diff --git a/tests/providers/google/marketing_platform/operators/test_analytics.py b/providers/tests/google/marketing_platform/operators/test_analytics.py
similarity index 100%
rename from tests/providers/google/marketing_platform/operators/test_analytics.py
rename to providers/tests/google/marketing_platform/operators/test_analytics.py
diff --git a/tests/providers/google/marketing_platform/operators/test_analytics_admin.py b/providers/tests/google/marketing_platform/operators/test_analytics_admin.py
similarity index 100%
rename from tests/providers/google/marketing_platform/operators/test_analytics_admin.py
rename to providers/tests/google/marketing_platform/operators/test_analytics_admin.py
diff --git a/tests/providers/google/marketing_platform/operators/test_campaign_manager.py b/providers/tests/google/marketing_platform/operators/test_campaign_manager.py
similarity index 100%
rename from tests/providers/google/marketing_platform/operators/test_campaign_manager.py
rename to providers/tests/google/marketing_platform/operators/test_campaign_manager.py
diff --git a/tests/providers/google/marketing_platform/operators/test_display_video.py b/providers/tests/google/marketing_platform/operators/test_display_video.py
similarity index 100%
rename from tests/providers/google/marketing_platform/operators/test_display_video.py
rename to providers/tests/google/marketing_platform/operators/test_display_video.py
diff --git a/tests/providers/google/marketing_platform/operators/test_display_video_system.py b/providers/tests/google/marketing_platform/operators/test_display_video_system.py
similarity index 91%
rename from tests/providers/google/marketing_platform/operators/test_display_video_system.py
rename to providers/tests/google/marketing_platform/operators/test_display_video_system.py
index 10422e8ccb912..78f5d4ee021f9 100644
--- a/tests/providers/google/marketing_platform/operators/test_display_video_system.py
+++ b/providers/tests/google/marketing_platform/operators/test_display_video_system.py
@@ -20,8 +20,13 @@
 from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook
 from airflow.providers.google.marketing_platform.example_dags.example_display_video import BUCKET
-from tests.providers.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY, GMP_KEY
-from tests.test_utils.gcp_system_helpers import MARKETING_DAG_FOLDER, GoogleSystemTest, provide_gcp_context
+
+from dev.tests_common.test_utils.gcp_system_helpers import (
+    MARKETING_DAG_FOLDER,
+    GoogleSystemTest,
+    provide_gcp_context,
+)
+from providers.tests.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY, GMP_KEY
 
 # Requires the following scope:
 SCOPES = [
diff --git a/tests/providers/google/marketing_platform/operators/test_search_ads.py b/providers/tests/google/marketing_platform/operators/test_search_ads.py
similarity index 100%
rename from tests/providers/google/marketing_platform/operators/test_search_ads.py
rename to providers/tests/google/marketing_platform/operators/test_search_ads.py
diff --git a/tests/providers/facebook/ads/hooks/__init__.py b/providers/tests/google/marketing_platform/sensors/__init__.py
similarity index 100%
rename from tests/providers/facebook/ads/hooks/__init__.py
rename to providers/tests/google/marketing_platform/sensors/__init__.py
diff --git a/tests/providers/google/marketing_platform/sensors/test_campaign_manager.py b/providers/tests/google/marketing_platform/sensors/test_campaign_manager.py
similarity index 100%
rename from tests/providers/google/marketing_platform/sensors/test_campaign_manager.py
rename to providers/tests/google/marketing_platform/sensors/test_campaign_manager.py
diff --git a/tests/providers/google/marketing_platform/sensors/test_display_video.py b/providers/tests/google/marketing_platform/sensors/test_display_video.py
similarity index 100%
rename from tests/providers/google/marketing_platform/sensors/test_display_video.py
rename to providers/tests/google/marketing_platform/sensors/test_display_video.py
diff --git a/tests/providers/google/leveldb/operators/__init__.py b/providers/tests/google/suite/__init__.py
similarity index 100%
rename from tests/providers/google/leveldb/operators/__init__.py
rename to providers/tests/google/suite/__init__.py
diff --git a/tests/providers/google/marketing_platform/links/__init__.py b/providers/tests/google/suite/hooks/__init__.py
similarity index 100%
rename from tests/providers/google/marketing_platform/links/__init__.py
rename to providers/tests/google/suite/hooks/__init__.py
diff --git a/tests/providers/google/suite/hooks/test_calendar.py b/providers/tests/google/suite/hooks/test_calendar.py
similarity index 98%
rename from tests/providers/google/suite/hooks/test_calendar.py
rename to providers/tests/google/suite/hooks/test_calendar.py
index e072043f6ea5d..dfcf3e10626f3 100644
--- a/tests/providers/google/suite/hooks/test_calendar.py
+++ b/providers/tests/google/suite/hooks/test_calendar.py
@@ -24,7 +24,8 @@
 from unittest import mock
 
 from airflow.providers.google.suite.hooks.calendar import GoogleCalendarHook
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 API_VERSION = "api_version"
 GCP_CONN_ID = "test"
diff --git a/tests/providers/google/suite/hooks/test_drive.py b/providers/tests/google/suite/hooks/test_drive.py
similarity index 99%
rename from tests/providers/google/suite/hooks/test_drive.py
rename to providers/tests/google/suite/hooks/test_drive.py
index 00423f263401c..a6fb0391d2c8c 100644
--- a/tests/providers/google/suite/hooks/test_drive.py
+++ b/providers/tests/google/suite/hooks/test_drive.py
@@ -22,7 +22,8 @@
 import pytest
 
 from airflow.providers.google.suite.hooks.drive import GoogleDriveHook
-from tests.providers.google.cloud.utils.base_gcp_mock import GCP_CONNECTION_WITH_PROJECT_ID
+
+from providers.tests.google.cloud.utils.base_gcp_mock import GCP_CONNECTION_WITH_PROJECT_ID
 
 
 @pytest.mark.db_test
diff --git a/tests/providers/google/suite/hooks/test_sheets.py b/providers/tests/google/suite/hooks/test_sheets.py
similarity index 99%
rename from tests/providers/google/suite/hooks/test_sheets.py
rename to providers/tests/google/suite/hooks/test_sheets.py
index ca3159d7d1c93..1be75426a336a 100644
--- a/tests/providers/google/suite/hooks/test_sheets.py
+++ b/providers/tests/google/suite/hooks/test_sheets.py
@@ -27,7 +27,8 @@
 from airflow.exceptions import AirflowException
 from airflow.providers.google.suite.hooks.sheets import GSheetsHook
-from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
+
+from providers.tests.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 GCP_CONN_ID = "test"
 SPREADSHEET_ID = "1234567890"
diff --git a/tests/providers/google/suite/__init__.py b/providers/tests/google/suite/operators/__init__.py
similarity index 100%
rename from tests/providers/google/suite/__init__.py
rename to providers/tests/google/suite/operators/__init__.py
diff --git a/tests/providers/google/suite/operators/test_sheets.py b/providers/tests/google/suite/operators/test_sheets.py
similarity index 100%
rename from tests/providers/google/suite/operators/test_sheets.py
rename to providers/tests/google/suite/operators/test_sheets.py
diff --git a/tests/providers/google/suite/hooks/__init__.py b/providers/tests/google/suite/sensors/__init__.py
similarity index 100%
rename from tests/providers/google/suite/hooks/__init__.py
rename to providers/tests/google/suite/sensors/__init__.py
diff --git a/tests/providers/google/suite/sensors/test_drive.py b/providers/tests/google/suite/sensors/test_drive.py
similarity index 100%
rename from tests/providers/google/suite/sensors/test_drive.py
rename to providers/tests/google/suite/sensors/test_drive.py
diff --git a/tests/providers/ftp/operators/__init__.py b/providers/tests/google/suite/transfers/__init__.py
similarity index 100%
rename from tests/providers/ftp/operators/__init__.py
rename to providers/tests/google/suite/transfers/__init__.py
diff --git a/tests/providers/google/suite/transfers/test_gcs_to_gdrive.py b/providers/tests/google/suite/transfers/test_gcs_to_gdrive.py
similarity index 100%
rename from tests/providers/google/suite/transfers/test_gcs_to_gdrive.py
rename to providers/tests/google/suite/transfers/test_gcs_to_gdrive.py
diff --git a/tests/providers/google/suite/transfers/test_gcs_to_sheets.py b/providers/tests/google/suite/transfers/test_gcs_to_sheets.py
similarity index 100%
rename from tests/providers/google/suite/transfers/test_gcs_to_sheets.py
rename to providers/tests/google/suite/transfers/test_gcs_to_sheets.py
diff --git a/tests/providers/google/suite/transfers/test_local_to_drive.py b/providers/tests/google/suite/transfers/test_local_to_drive.py
similarity index 100%
rename from tests/providers/google/suite/transfers/test_local_to_drive.py
rename to providers/tests/google/suite/transfers/test_local_to_drive.py
diff --git a/tests/providers/google/suite/transfers/test_sql_to_sheets.py b/providers/tests/google/suite/transfers/test_sql_to_sheets.py
similarity index 100%
rename from tests/providers/google/suite/transfers/test_sql_to_sheets.py
rename to providers/tests/google/suite/transfers/test_sql_to_sheets.py
diff --git a/tests/providers/google/test_go_module.py b/providers/tests/google/test_go_module.py
similarity index 100%
rename from tests/providers/google/test_go_module.py
rename to providers/tests/google/test_go_module.py
diff --git a/tests/providers/google/suite/operators/__init__.py b/providers/tests/grpc/__init__.py
similarity index 100%
rename from tests/providers/google/suite/operators/__init__.py
rename to providers/tests/grpc/__init__.py
diff --git a/tests/providers/google/suite/sensors/__init__.py b/providers/tests/grpc/hooks/__init__.py
similarity index 100%
rename from tests/providers/google/suite/sensors/__init__.py
rename to providers/tests/grpc/hooks/__init__.py
diff --git a/tests/providers/grpc/hooks/test_grpc.py b/providers/tests/grpc/hooks/test_grpc.py
similarity index 100%
rename from tests/providers/grpc/hooks/test_grpc.py
rename to providers/tests/grpc/hooks/test_grpc.py
diff --git a/tests/providers/grpc/__init__.py b/providers/tests/grpc/operators/__init__.py
similarity index 100%
rename from tests/providers/grpc/__init__.py
rename to providers/tests/grpc/operators/__init__.py
diff --git a/tests/providers/grpc/operators/test_grpc.py b/providers/tests/grpc/operators/test_grpc.py
similarity index 100%
rename from tests/providers/grpc/operators/test_grpc.py
rename to providers/tests/grpc/operators/test_grpc.py
diff --git a/tests/providers/github/__init__.py b/providers/tests/hashicorp/__init__.py
similarity index 100%
rename from tests/providers/github/__init__.py
rename to providers/tests/hashicorp/__init__.py
diff --git a/tests/providers/github/hooks/__init__.py b/providers/tests/hashicorp/_internal_client/__init__.py
similarity index 100%
rename from tests/providers/github/hooks/__init__.py
rename to providers/tests/hashicorp/_internal_client/__init__.py
diff --git a/tests/providers/hashicorp/_internal_client/test_vault_client.py b/providers/tests/hashicorp/_internal_client/test_vault_client.py
similarity index 100%
rename from tests/providers/hashicorp/_internal_client/test_vault_client.py
rename to providers/tests/hashicorp/_internal_client/test_vault_client.py
diff --git a/tests/providers/github/operators/__init__.py b/providers/tests/hashicorp/hooks/__init__.py
similarity index 100%
rename from tests/providers/github/operators/__init__.py
rename to providers/tests/hashicorp/hooks/__init__.py
diff --git a/tests/providers/hashicorp/hooks/test_vault.py b/providers/tests/hashicorp/hooks/test_vault.py
similarity index 99%
rename from tests/providers/hashicorp/hooks/test_vault.py
rename to providers/tests/hashicorp/hooks/test_vault.py
index 1880be99749e6..442a656d20064 100644
--- a/tests/providers/hashicorp/hooks/test_vault.py
+++ b/providers/tests/hashicorp/hooks/test_vault.py
@@ -26,7 +26,8 @@
 from airflow.configuration import AirflowConfigParser
 from airflow.exceptions import AirflowConfigException
 from airflow.providers.hashicorp.hooks.vault import VaultHook
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 
 class TestVaultHook:
diff --git a/tests/providers/github/sensors/__init__.py b/providers/tests/hashicorp/secrets/__init__.py
similarity index 100%
rename from tests/providers/github/sensors/__init__.py
rename to providers/tests/hashicorp/secrets/__init__.py
diff --git a/tests/providers/hashicorp/secrets/test_vault.py b/providers/tests/hashicorp/secrets/test_vault.py
similarity index 100%
rename from tests/providers/hashicorp/secrets/test_vault.py
rename to providers/tests/hashicorp/secrets/test_vault.py
diff --git a/tests/providers/grpc/hooks/__init__.py b/providers/tests/http/__init__.py
similarity index 100%
rename from tests/providers/grpc/hooks/__init__.py
rename to providers/tests/http/__init__.py
diff --git a/tests/providers/grpc/operators/__init__.py b/providers/tests/http/hooks/__init__.py
similarity index 100%
rename from tests/providers/grpc/operators/__init__.py
rename to providers/tests/http/hooks/__init__.py
diff --git a/tests/providers/http/hooks/test_http.py b/providers/tests/http/hooks/test_http.py
similarity index 95%
rename from tests/providers/http/hooks/test_http.py
rename to providers/tests/http/hooks/test_http.py
index 5b631df8a4d60..e09fd2d034e60 100644
--- a/tests/providers/http/hooks/test_http.py
+++ b/providers/tests/http/hooks/test_http.py
@@ -487,11 +487,15 @@ def test_default_auth_not_initialized(self, auth):
         auth.assert_not_called()
 
     def test_keep_alive_enabled(self):
-        with mock.patch(
-            "airflow.hooks.base.BaseHook.get_connection", side_effect=get_airflow_connection_with_port
-        ), mock.patch(
-            "requests_toolbelt.adapters.socket_options.TCPKeepAliveAdapter.send"
-        ) as tcp_keep_alive_send, mock.patch("requests.adapters.HTTPAdapter.send") as http_send:
+        with (
+            mock.patch(
+                "airflow.hooks.base.BaseHook.get_connection", side_effect=get_airflow_connection_with_port
+            ),
+            mock.patch(
+                "requests_toolbelt.adapters.socket_options.TCPKeepAliveAdapter.send"
+            ) as tcp_keep_alive_send,
+            mock.patch("requests.adapters.HTTPAdapter.send") as http_send,
+        ):
             hook = HttpHook(method="GET")
             response = Response()
             response.status_code = HTTPStatus.OK
@@ -502,11 +506,15 @@ def test_keep_alive_enabled(self):
             http_send.assert_not_called()
 
     def test_keep_alive_disabled(self):
-        with mock.patch(
-            "airflow.hooks.base.BaseHook.get_connection", side_effect=get_airflow_connection_with_port
-        ), mock.patch(
-            "requests_toolbelt.adapters.socket_options.TCPKeepAliveAdapter.send"
-        ) as tcp_keep_alive_send, mock.patch("requests.adapters.HTTPAdapter.send") as http_send:
+        with (
+            mock.patch(
+                "airflow.hooks.base.BaseHook.get_connection", side_effect=get_airflow_connection_with_port
+            ),
+            mock.patch(
+                "requests_toolbelt.adapters.socket_options.TCPKeepAliveAdapter.send"
+            ) as tcp_keep_alive_send,
+            mock.patch("requests.adapters.HTTPAdapter.send") as http_send,
+        ):
             hook = HttpHook(method="GET", tcp_keep_alive=False)
             response = Response()
             response.status_code = HTTPStatus.OK
@@ -536,9 +544,12 @@ async def test_do_api_call_async_non_retryable_error(self, aioresponse):
         hook = HttpAsyncHook(method="GET")
         aioresponse.get("http://httpbin.org/non_existent_endpoint", status=400)
 
-        with pytest.raises(AirflowException, match="400:Bad Request"), mock.patch.dict(
-            "os.environ",
-            AIRFLOW_CONN_HTTP_DEFAULT="http://httpbin.org/",
+        with (
+            pytest.raises(AirflowException, match="400:Bad Request"),
+            mock.patch.dict(
+                "os.environ",
+                AIRFLOW_CONN_HTTP_DEFAULT="http://httpbin.org/",
+            ),
         ):
             await hook.run(endpoint="non_existent_endpoint")
@@ -549,9 +560,12 @@ async def test_do_api_call_async_retryable_error(self, caplog, aioresponse):
         hook = HttpAsyncHook(method="GET")
         aioresponse.get("http://httpbin.org/non_existent_endpoint", status=500, repeat=True)
 
-        with pytest.raises(AirflowException, match="500:Internal Server Error"), mock.patch.dict(
-            "os.environ",
-            AIRFLOW_CONN_HTTP_DEFAULT="http://httpbin.org/",
+        with (
+            pytest.raises(AirflowException, match="500:Internal Server Error"),
+            mock.patch.dict(
+                "os.environ",
+                AIRFLOW_CONN_HTTP_DEFAULT="http://httpbin.org/",
+            ),
         ):
             await hook.run(endpoint="non_existent_endpoint")
@@ -700,8 +714,9 @@ def get_empty_conn(conn_id: str = "http_default"):
         return Connection(conn_id=conn_id, conn_type="http")
 
     hook = HttpAsyncHook()
-    with mock.patch("airflow.hooks.base.BaseHook.get_connection", side_effect=get_empty_conn), mock.patch(
-        "aiohttp.ClientSession.post", new_callable=mock.AsyncMock
-    ) as mocked_function:
+    with (
+        mock.patch("airflow.hooks.base.BaseHook.get_connection", side_effect=get_empty_conn),
+        mock.patch("aiohttp.ClientSession.post", new_callable=mock.AsyncMock) as mocked_function,
+    ):
         await hook.run("test.com:8080/v1/test")
     assert mocked_function.call_args.args[0] == "http://test.com:8080/v1/test"
diff --git a/tests/providers/http/__init__.py b/providers/tests/http/operators/__init__.py
similarity index 100%
rename from tests/providers/http/__init__.py
rename to providers/tests/http/operators/__init__.py
diff --git a/tests/providers/http/operators/test_http.py b/providers/tests/http/operators/test_http.py
similarity index 100%
rename from tests/providers/http/operators/test_http.py
rename to providers/tests/http/operators/test_http.py
diff --git a/tests/providers/http/hooks/__init__.py b/providers/tests/http/sensors/__init__.py
similarity index 100%
rename from tests/providers/http/hooks/__init__.py
rename to providers/tests/http/sensors/__init__.py
diff --git a/tests/providers/http/sensors/test_http.py b/providers/tests/http/sensors/test_http.py
similarity index 100%
rename from tests/providers/http/sensors/test_http.py
rename to providers/tests/http/sensors/test_http.py
diff --git a/tests/providers/http/operators/__init__.py b/providers/tests/http/triggers/__init__.py
similarity index 100%
rename from tests/providers/http/operators/__init__.py
rename to providers/tests/http/triggers/__init__.py
diff --git a/tests/providers/http/triggers/test_http.py b/providers/tests/http/triggers/test_http.py
similarity index 100%
rename from tests/providers/http/triggers/test_http.py
rename to providers/tests/http/triggers/test_http.py
diff --git a/tests/providers/http/sensors/__init__.py b/providers/tests/imap/__init__.py
similarity index 100%
rename from tests/providers/http/sensors/__init__.py
rename to providers/tests/imap/__init__.py
diff --git a/tests/providers/http/triggers/__init__.py b/providers/tests/imap/hooks/__init__.py
similarity index 100%
rename from tests/providers/http/triggers/__init__.py
rename to providers/tests/imap/hooks/__init__.py
diff --git a/tests/providers/imap/hooks/test_imap.py b/providers/tests/imap/hooks/test_imap.py
similarity index 99%
rename from tests/providers/imap/hooks/test_imap.py
rename to providers/tests/imap/hooks/test_imap.py
index 2430d3f664882..2971cfd1924c6 100644
--- a/tests/providers/imap/hooks/test_imap.py
+++ b/providers/tests/imap/hooks/test_imap.py
@@ -27,7 +27,8 @@
 from airflow.models import Connection
 from airflow.providers.imap.hooks.imap import ImapHook
 from airflow.utils import db
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/providers/imap/__init__.py b/providers/tests/imap/sensors/__init__.py
similarity index 100%
rename from tests/providers/imap/__init__.py
rename to providers/tests/imap/sensors/__init__.py
diff --git a/tests/providers/imap/sensors/test_imap_attachment.py b/providers/tests/imap/sensors/test_imap_attachment.py
similarity index 100%
rename from tests/providers/imap/sensors/test_imap_attachment.py
rename to providers/tests/imap/sensors/test_imap_attachment.py
diff --git a/tests/providers/google/__init__.py b/providers/tests/influxdb/__init__.py
similarity index 100%
rename from tests/providers/google/__init__.py
rename to providers/tests/influxdb/__init__.py
diff --git a/tests/providers/google/ads/__init__.py b/providers/tests/influxdb/hooks/__init__.py
similarity index 100%
rename from tests/providers/google/ads/__init__.py
rename to providers/tests/influxdb/hooks/__init__.py
diff --git a/tests/providers/influxdb/hooks/test_influxdb.py b/providers/tests/influxdb/hooks/test_influxdb.py
similarity index 100%
rename from tests/providers/influxdb/hooks/test_influxdb.py
rename to providers/tests/influxdb/hooks/test_influxdb.py
diff --git a/tests/providers/google/ads/hooks/__init__.py b/providers/tests/influxdb/operators/__init__.py
similarity index 100%
rename from tests/providers/google/ads/hooks/__init__.py
rename to providers/tests/influxdb/operators/__init__.py
diff --git a/tests/providers/influxdb/operators/test_influxdb.py b/providers/tests/influxdb/operators/test_influxdb.py
similarity index 100%
rename from tests/providers/influxdb/operators/test_influxdb.py
rename to providers/tests/influxdb/operators/test_influxdb.py
diff --git a/tests/providers/google/ads/operators/__init__.py b/providers/tests/integration/__init__.py
similarity index 100%
rename from tests/providers/google/ads/operators/__init__.py
rename to providers/tests/integration/__init__.py
diff --git a/tests/providers/google/ads/transfers/__init__.py b/providers/tests/integration/apache/__init__.py
similarity index 100%
rename from tests/providers/google/ads/transfers/__init__.py
rename to providers/tests/integration/apache/__init__.py
diff --git a/tests/providers/google/cloud/__init__.py b/providers/tests/integration/apache/cassandra/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/__init__.py
rename to providers/tests/integration/apache/cassandra/__init__.py
diff --git a/tests/providers/google/cloud/_internal_client/__init__.py b/providers/tests/integration/apache/cassandra/hooks/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/_internal_client/__init__.py
rename to providers/tests/integration/apache/cassandra/hooks/__init__.py
diff --git a/tests/integration/providers/apache/cassandra/hooks/test_cassandra.py b/providers/tests/integration/apache/cassandra/hooks/test_cassandra.py
similarity index 100%
rename from tests/integration/providers/apache/cassandra/hooks/test_cassandra.py
rename to providers/tests/integration/apache/cassandra/hooks/test_cassandra.py
diff --git a/tests/providers/google/cloud/hooks/__init__.py b/providers/tests/integration/apache/drill/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/hooks/__init__.py
rename to providers/tests/integration/apache/drill/__init__.py
diff --git a/tests/providers/google/cloud/hooks/vertex_ai/__init__.py b/providers/tests/integration/apache/drill/hooks/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/hooks/vertex_ai/__init__.py
rename to providers/tests/integration/apache/drill/hooks/__init__.py
diff --git a/tests/integration/providers/apache/drill/hooks/test_drill.py b/providers/tests/integration/apache/drill/hooks/test_drill.py
similarity index 100%
rename from tests/integration/providers/apache/drill/hooks/test_drill.py
rename to providers/tests/integration/apache/drill/hooks/test_drill.py
diff --git a/tests/providers/google/cloud/links/__init__.py b/providers/tests/integration/apache/drill/operators/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/links/__init__.py
rename to providers/tests/integration/apache/drill/operators/__init__.py
diff --git a/tests/providers/google/cloud/log/__init__.py b/providers/tests/integration/apache/hive/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/log/__init__.py
rename to providers/tests/integration/apache/hive/__init__.py
diff --git a/tests/providers/google/cloud/openlineage/__init__.py b/providers/tests/integration/apache/hive/transfers/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/openlineage/__init__.py
rename to providers/tests/integration/apache/hive/transfers/__init__.py
diff --git a/tests/integration/providers/apache/hive/transfers/test_mssql_to_hive.py b/providers/tests/integration/apache/hive/transfers/test_mssql_to_hive.py
similarity index 100%
rename from tests/integration/providers/apache/hive/transfers/test_mssql_to_hive.py
rename to providers/tests/integration/apache/hive/transfers/test_mssql_to_hive.py
diff --git a/tests/providers/google/cloud/operators/__init__.py b/providers/tests/integration/apache/kafka/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/operators/__init__.py
rename to providers/tests/integration/apache/kafka/__init__.py
diff --git a/tests/providers/google/cloud/operators/source/__init__.py b/providers/tests/integration/apache/kafka/hooks/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/operators/source/__init__.py
rename to providers/tests/integration/apache/kafka/hooks/__init__.py
diff --git a/tests/integration/providers/apache/kafka/hooks/test_admin_client.py b/providers/tests/integration/apache/kafka/hooks/test_admin_client.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/hooks/test_admin_client.py
rename to providers/tests/integration/apache/kafka/hooks/test_admin_client.py
diff --git a/tests/integration/providers/apache/kafka/hooks/test_consumer.py b/providers/tests/integration/apache/kafka/hooks/test_consumer.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/hooks/test_consumer.py
rename to providers/tests/integration/apache/kafka/hooks/test_consumer.py
diff --git a/tests/integration/providers/apache/kafka/hooks/test_producer.py b/providers/tests/integration/apache/kafka/hooks/test_producer.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/hooks/test_producer.py
rename to providers/tests/integration/apache/kafka/hooks/test_producer.py
diff --git a/tests/providers/google/cloud/operators/source/source_prefix/__init__.py b/providers/tests/integration/apache/kafka/operators/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/operators/source/source_prefix/__init__.py
rename to providers/tests/integration/apache/kafka/operators/__init__.py
diff --git a/tests/integration/providers/apache/kafka/operators/test_consume.py b/providers/tests/integration/apache/kafka/operators/test_consume.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/operators/test_consume.py
rename to providers/tests/integration/apache/kafka/operators/test_consume.py
diff --git a/tests/integration/providers/apache/kafka/operators/test_produce.py b/providers/tests/integration/apache/kafka/operators/test_produce.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/operators/test_produce.py
rename to providers/tests/integration/apache/kafka/operators/test_produce.py
diff --git a/tests/providers/google/cloud/operators/vertex_ai/__init__.py b/providers/tests/integration/apache/kafka/sensors/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/operators/vertex_ai/__init__.py
rename to providers/tests/integration/apache/kafka/sensors/__init__.py
diff --git a/tests/providers/google/cloud/secrets/__init__.py b/providers/tests/integration/apache/kafka/triggers/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/secrets/__init__.py
rename to providers/tests/integration/apache/kafka/triggers/__init__.py
diff --git a/tests/integration/providers/apache/kafka/triggers/test_await_message.py b/providers/tests/integration/apache/kafka/triggers/test_await_message.py
similarity index 100%
rename from tests/integration/providers/apache/kafka/triggers/test_await_message.py
rename to providers/tests/integration/apache/kafka/triggers/test_await_message.py
diff --git a/tests/providers/google/cloud/sensors/__init__.py b/providers/tests/integration/apache/pinot/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/sensors/__init__.py
rename to providers/tests/integration/apache/pinot/__init__.py
diff --git a/tests/providers/google/cloud/transfers/__init__.py b/providers/tests/integration/apache/pinot/hooks/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/transfers/__init__.py
rename to providers/tests/integration/apache/pinot/hooks/__init__.py
diff --git a/tests/integration/providers/apache/pinot/hooks/test_pinot.py b/providers/tests/integration/apache/pinot/hooks/test_pinot.py
similarity index 100%
rename from tests/integration/providers/apache/pinot/hooks/test_pinot.py
rename to providers/tests/integration/apache/pinot/hooks/test_pinot.py
diff --git a/tests/providers/google/cloud/triggers/__init__.py b/providers/tests/integration/google/__init__.py
similarity index 100%
rename from tests/providers/google/cloud/triggers/__init__.py
rename to providers/tests/integration/google/__init__.py
diff --git a/tests/providers/google/common/__init__.py b/providers/tests/integration/google/cloud/__init__.py
similarity index 100%
rename from tests/providers/google/common/__init__.py
rename to providers/tests/integration/google/cloud/__init__.py
diff --git a/tests/providers/google/common/auth_backend/__init__.py b/providers/tests/integration/google/cloud/transfers/__init__.py
similarity index 100%
rename from tests/providers/google/common/auth_backend/__init__.py
rename to providers/tests/integration/google/cloud/transfers/__init__.py
diff --git a/tests/integration/providers/google/cloud/transfers/test_bigquery_to_mssql.py b/providers/tests/integration/google/cloud/transfers/test_bigquery_to_mssql.py
similarity index 100%
rename from tests/integration/providers/google/cloud/transfers/test_bigquery_to_mssql.py
rename to providers/tests/integration/google/cloud/transfers/test_bigquery_to_mssql.py
diff --git a/tests/integration/providers/google/cloud/transfers/test_mssql_to_gcs.py b/providers/tests/integration/google/cloud/transfers/test_mssql_to_gcs.py
similarity index 100%
rename from tests/integration/providers/google/cloud/transfers/test_mssql_to_gcs.py
rename to providers/tests/integration/google/cloud/transfers/test_mssql_to_gcs.py
diff --git a/tests/integration/providers/google/cloud/transfers/test_trino_to_gcs.py b/providers/tests/integration/google/cloud/transfers/test_trino_to_gcs.py
similarity index 100%
rename from tests/integration/providers/google/cloud/transfers/test_trino_to_gcs.py
rename to providers/tests/integration/google/cloud/transfers/test_trino_to_gcs.py
diff --git a/tests/providers/google/common/hooks/__init__.py b/providers/tests/integration/microsoft/__init__.py
similarity index 100%
rename from tests/providers/google/common/hooks/__init__.py
rename to providers/tests/integration/microsoft/__init__.py
diff --git a/tests/providers/google/common/utils/__init__.py b/providers/tests/integration/microsoft/mssql/__init__.py
similarity index 100%
rename from tests/providers/google/common/utils/__init__.py
rename to providers/tests/integration/microsoft/mssql/__init__.py
diff --git a/tests/providers/google/datasets/__init__.py b/providers/tests/integration/microsoft/mssql/hooks/__init__.py
similarity index 100%
rename from tests/providers/google/datasets/__init__.py
rename to providers/tests/integration/microsoft/mssql/hooks/__init__.py
diff --git a/tests/integration/providers/microsoft/mssql/hooks/test_mssql.py b/providers/tests/integration/microsoft/mssql/hooks/test_mssql.py
similarity index 100%
rename from tests/integration/providers/microsoft/mssql/hooks/test_mssql.py
rename to providers/tests/integration/microsoft/mssql/hooks/test_mssql.py
diff --git a/tests/providers/google/firebase/__init__.py b/providers/tests/integration/mongo/__init__.py
similarity index 100%
rename from tests/providers/google/firebase/__init__.py
rename to providers/tests/integration/mongo/__init__.py
diff --git a/tests/providers/google/firebase/hooks/__init__.py b/providers/tests/integration/mongo/sensors/__init__.py
similarity index 100%
rename from tests/providers/google/firebase/hooks/__init__.py
rename to providers/tests/integration/mongo/sensors/__init__.py
diff --git a/tests/integration/providers/mongo/sensors/test_mongo.py b/providers/tests/integration/mongo/sensors/test_mongo.py
similarity index 100%
rename from tests/integration/providers/mongo/sensors/test_mongo.py
rename to providers/tests/integration/mongo/sensors/test_mongo.py
diff --git a/tests/providers/google/firebase/operators/__init__.py b/providers/tests/integration/openlineage/__init__.py
similarity index 100%
rename from tests/providers/google/firebase/operators/__init__.py
rename to providers/tests/integration/openlineage/__init__.py
diff --git a/tests/providers/google/marketing_platform/__init__.py b/providers/tests/integration/openlineage/operators/__init__.py
similarity index 100%
rename from tests/providers/google/marketing_platform/__init__.py
rename to providers/tests/integration/openlineage/operators/__init__.py
diff --git a/tests/providers/google/marketing_platform/hooks/__init__.py b/providers/tests/integration/qdrant/__init__.py
similarity index 100%
rename from tests/providers/google/marketing_platform/hooks/__init__.py
rename to providers/tests/integration/qdrant/__init__.py
diff --git a/tests/providers/google/marketing_platform/operators/__init__.py b/providers/tests/integration/qdrant/hooks/__init__.py
similarity index 100%
rename from tests/providers/google/marketing_platform/operators/__init__.py
rename to providers/tests/integration/qdrant/hooks/__init__.py
diff --git a/tests/integration/providers/qdrant/hooks/test_qdrant.py b/providers/tests/integration/qdrant/hooks/test_qdrant.py
similarity index 100%
rename from tests/integration/providers/qdrant/hooks/test_qdrant.py
rename to providers/tests/integration/qdrant/hooks/test_qdrant.py
diff --git a/tests/providers/google/marketing_platform/sensors/__init__.py b/providers/tests/integration/qdrant/operators/__init__.py
similarity index 100%
rename from tests/providers/google/marketing_platform/sensors/__init__.py
rename to providers/tests/integration/qdrant/operators/__init__.py
diff --git a/tests/integration/providers/qdrant/operators/test_qdrant_ingest.py b/providers/tests/integration/qdrant/operators/test_qdrant_ingest.py
similarity index 100%
rename from tests/integration/providers/qdrant/operators/test_qdrant_ingest.py
rename to providers/tests/integration/qdrant/operators/test_qdrant_ingest.py
diff --git a/tests/providers/google/suite/transfers/__init__.py b/providers/tests/integration/redis/__init__.py
similarity index 100%
rename from tests/providers/google/suite/transfers/__init__.py
rename to providers/tests/integration/redis/__init__.py
diff --git a/tests/providers/hashicorp/__init__.py b/providers/tests/integration/redis/hooks/__init__.py
similarity index 100%
rename from tests/providers/hashicorp/__init__.py
rename to providers/tests/integration/redis/hooks/__init__.py
diff --git a/tests/integration/providers/redis/hooks/test_redis.py b/providers/tests/integration/redis/hooks/test_redis.py
similarity index 100%
rename from tests/integration/providers/redis/hooks/test_redis.py
rename to providers/tests/integration/redis/hooks/test_redis.py
diff --git a/tests/providers/hashicorp/_internal_client/__init__.py b/providers/tests/integration/redis/operators/__init__.py
similarity index 100%
rename from tests/providers/hashicorp/_internal_client/__init__.py
rename to providers/tests/integration/redis/operators/__init__.py
diff --git a/tests/integration/providers/redis/operators/test_redis_publish.py b/providers/tests/integration/redis/operators/test_redis_publish.py
similarity index 100%
rename from tests/integration/providers/redis/operators/test_redis_publish.py
rename to providers/tests/integration/redis/operators/test_redis_publish.py
diff --git a/tests/providers/hashicorp/hooks/__init__.py b/providers/tests/integration/redis/sensors/__init__.py
similarity index 100%
rename from tests/providers/hashicorp/hooks/__init__.py
rename to providers/tests/integration/redis/sensors/__init__.py
diff --git a/tests/integration/providers/redis/sensors/test_redis_key.py b/providers/tests/integration/redis/sensors/test_redis_key.py
similarity index 100%
rename from tests/integration/providers/redis/sensors/test_redis_key.py
rename to providers/tests/integration/redis/sensors/test_redis_key.py
diff --git a/tests/integration/providers/redis/sensors/test_redis_pub_sub.py b/providers/tests/integration/redis/sensors/test_redis_pub_sub.py
similarity index 100%
rename from tests/integration/providers/redis/sensors/test_redis_pub_sub.py
rename to providers/tests/integration/redis/sensors/test_redis_pub_sub.py
diff --git a/tests/providers/hashicorp/secrets/__init__.py b/providers/tests/integration/trino/__init__.py
similarity index 100%
rename from tests/providers/hashicorp/secrets/__init__.py
rename to providers/tests/integration/trino/__init__.py
diff --git a/tests/providers/influxdb/__init__.py b/providers/tests/integration/trino/hooks/__init__.py
similarity index 100%
rename from tests/providers/influxdb/__init__.py
rename to providers/tests/integration/trino/hooks/__init__.py
diff --git a/tests/integration/providers/trino/hooks/test_trino.py b/providers/tests/integration/trino/hooks/test_trino.py
similarity index 100%
rename from tests/integration/providers/trino/hooks/test_trino.py
rename to providers/tests/integration/trino/hooks/test_trino.py
diff --git a/tests/providers/influxdb/hooks/__init__.py b/providers/tests/integration/ydb/__init__.py
similarity index 100%
rename from tests/providers/influxdb/hooks/__init__.py
rename to providers/tests/integration/ydb/__init__.py
diff --git a/tests/providers/influxdb/operators/__init__.py b/providers/tests/integration/ydb/hooks/__init__.py
similarity index 100%
rename from tests/providers/influxdb/operators/__init__.py
rename to providers/tests/integration/ydb/hooks/__init__.py
diff --git a/tests/providers/jenkins/hooks/__init__.py b/providers/tests/integration/ydb/operators/__init__.py
similarity index 100%
rename from tests/providers/jenkins/hooks/__init__.py
rename to providers/tests/integration/ydb/operators/__init__.py
diff --git a/tests/integration/providers/ydb/operators/test_ydb.py b/providers/tests/integration/ydb/operators/test_ydb.py
similarity index 100%
rename from tests/integration/providers/ydb/operators/test_ydb.py
rename to providers/tests/integration/ydb/operators/test_ydb.py
diff --git a/tests/providers/imap/hooks/__init__.py b/providers/tests/jdbc/__init__.py
similarity index 100%
rename from tests/providers/imap/hooks/__init__.py
rename to providers/tests/jdbc/__init__.py
diff --git a/tests/providers/imap/sensors/__init__.py b/providers/tests/jdbc/hooks/__init__.py
similarity index 100%
rename from tests/providers/imap/sensors/__init__.py
rename to providers/tests/jdbc/hooks/__init__.py
diff --git a/tests/providers/jdbc/hooks/test_jdbc.py
b/providers/tests/jdbc/hooks/test_jdbc.py similarity index 97% rename from tests/providers/jdbc/hooks/test_jdbc.py rename to providers/tests/jdbc/hooks/test_jdbc.py index f26a9d7ffb5b3..cfb27934d86da 100644 --- a/tests/providers/jdbc/hooks/test_jdbc.py +++ b/providers/tests/jdbc/hooks/test_jdbc.py @@ -157,9 +157,10 @@ def test_driver_extra_works_when_allow_driver_extra(self): assert hook.driver_class == "Blah driver class" def test_default_driver_set(self): - with patch.object(JdbcHook, "default_driver_path", "Blah driver path") as _, patch.object( - JdbcHook, "default_driver_class", "Blah driver class" - ) as _: + with ( + patch.object(JdbcHook, "default_driver_path", "Blah driver path") as _, + patch.object(JdbcHook, "default_driver_class", "Blah driver class") as _, + ): hook = get_hook() assert hook.driver_path == "Blah driver path" assert hook.driver_class == "Blah driver class" diff --git a/tests/providers/jdbc/__init__.py b/providers/tests/jdbc/operators/__init__.py similarity index 100% rename from tests/providers/jdbc/__init__.py rename to providers/tests/jdbc/operators/__init__.py diff --git a/tests/providers/jdbc/operators/test_jdbc.py b/providers/tests/jdbc/operators/test_jdbc.py similarity index 100% rename from tests/providers/jdbc/operators/test_jdbc.py rename to providers/tests/jdbc/operators/test_jdbc.py diff --git a/tests/providers/jdbc/hooks/__init__.py b/providers/tests/jenkins/__init__.py similarity index 100% rename from tests/providers/jdbc/hooks/__init__.py rename to providers/tests/jenkins/__init__.py diff --git a/tests/providers/microsoft/azure/log/__init__.py b/providers/tests/jenkins/hooks/__init__.py similarity index 100% rename from tests/providers/microsoft/azure/log/__init__.py rename to providers/tests/jenkins/hooks/__init__.py diff --git a/tests/providers/jenkins/hooks/test_jenkins.py b/providers/tests/jenkins/hooks/test_jenkins.py similarity index 100% rename from tests/providers/jenkins/hooks/test_jenkins.py rename to providers/tests/jenkins/hooks/test_jenkins.py diff --git a/tests/providers/jdbc/operators/__init__.py b/providers/tests/jenkins/operators/__init__.py similarity index 100% rename from tests/providers/jdbc/operators/__init__.py rename to providers/tests/jenkins/operators/__init__.py diff --git a/tests/providers/jenkins/operators/test_jenkins_job_trigger.py b/providers/tests/jenkins/operators/test_jenkins_job_trigger.py similarity index 85% rename from tests/providers/jenkins/operators/test_jenkins_job_trigger.py rename to providers/tests/jenkins/operators/test_jenkins_job_trigger.py index d21cdbedc007a..2d740da1fe9f1 100644 --- a/tests/providers/jenkins/operators/test_jenkins_job_trigger.py +++ b/providers/tests/jenkins/operators/test_jenkins_job_trigger.py @@ -46,13 +46,16 @@ def test_execute(self, parameters, mocker): hook_mock = Mock(spec=JenkinsHook) hook_mock.get_jenkins_server.return_value = jenkins_mock - with patch.object( - JenkinsJobTriggerOperator, - "hook", - new_callable=mocker.PropertyMock, - ) as hook_mocked, patch( - "airflow.providers.jenkins.operators.jenkins_job_trigger.jenkins_request_with_headers" - ) as mock_make_request: + with ( + patch.object( + JenkinsJobTriggerOperator, + "hook", + new_callable=mocker.PropertyMock, + ) as hook_mocked, + patch( + "airflow.providers.jenkins.operators.jenkins_job_trigger.jenkins_request_with_headers" + ) as mock_make_request, + ): mock_make_request.side_effect = [ {"body": "", "headers": {"Location": "http://what-a-strange.url/18"}}, {"body": '{"executable":{"number":"1"}}', 
"headers": {}}, @@ -86,13 +89,16 @@ def test_execute_job_polling_loop(self, parameters, mocker): hook_mock = Mock(spec=JenkinsHook) hook_mock.get_jenkins_server.return_value = jenkins_mock - with patch.object( - JenkinsJobTriggerOperator, - "hook", - new_callable=mocker.PropertyMock, - ) as hook_mocked, patch( - "airflow.providers.jenkins.operators.jenkins_job_trigger.jenkins_request_with_headers" - ) as mock_make_request: + with ( + patch.object( + JenkinsJobTriggerOperator, + "hook", + new_callable=mocker.PropertyMock, + ) as hook_mocked, + patch( + "airflow.providers.jenkins.operators.jenkins_job_trigger.jenkins_request_with_headers" + ) as mock_make_request, + ): mock_make_request.side_effect = [ {"body": "", "headers": {"Location": "http://what-a-strange.url/18"}}, {"body": '{"executable":{"number":"1"}}', "headers": {}}, @@ -124,13 +130,16 @@ def test_execute_job_failure(self, parameters, mocker): hook_mock = Mock(spec=JenkinsHook) hook_mock.get_jenkins_server.return_value = jenkins_mock - with patch.object( - JenkinsJobTriggerOperator, - "hook", - new_callable=mocker.PropertyMock, - ) as hook_mocked, patch( - "airflow.providers.jenkins.operators.jenkins_job_trigger.jenkins_request_with_headers" - ) as mock_make_request: + with ( + patch.object( + JenkinsJobTriggerOperator, + "hook", + new_callable=mocker.PropertyMock, + ) as hook_mocked, + patch( + "airflow.providers.jenkins.operators.jenkins_job_trigger.jenkins_request_with_headers" + ) as mock_make_request, + ): mock_make_request.side_effect = [ {"body": "", "headers": {"Location": "http://what-a-strange.url/18"}}, {"body": '{"executable":{"number":"1"}}', "headers": {}}, @@ -182,13 +191,16 @@ def test_allowed_jenkins_states(self, state, allowed_jenkins_states, mocker): hook_mock = Mock(spec=JenkinsHook) hook_mock.get_jenkins_server.return_value = jenkins_mock - with patch.object( - JenkinsJobTriggerOperator, - "hook", - new_callable=mocker.PropertyMock, - ) as hook_mocked, patch( - "airflow.providers.jenkins.operators.jenkins_job_trigger.jenkins_request_with_headers", - ) as mock_make_request: + with ( + patch.object( + JenkinsJobTriggerOperator, + "hook", + new_callable=mocker.PropertyMock, + ) as hook_mocked, + patch( + "airflow.providers.jenkins.operators.jenkins_job_trigger.jenkins_request_with_headers", + ) as mock_make_request, + ): mock_make_request.side_effect = [ {"body": "", "headers": {"Location": "http://what-a-strange.url/18"}}, {"body": '{"executable":{"number":"1"}}', "headers": {}}, @@ -246,13 +258,16 @@ def test_allowed_jenkins_states_failure(self, state, allowed_jenkins_states, moc hook_mock = Mock(spec=JenkinsHook) hook_mock.get_jenkins_server.return_value = jenkins_mock - with patch.object( - JenkinsJobTriggerOperator, - "hook", - new_callable=mocker.PropertyMock, - ) as hook_mocked, patch( - "airflow.providers.jenkins.operators.jenkins_job_trigger.jenkins_request_with_headers" - ) as mock_make_request: + with ( + patch.object( + JenkinsJobTriggerOperator, + "hook", + new_callable=mocker.PropertyMock, + ) as hook_mocked, + patch( + "airflow.providers.jenkins.operators.jenkins_job_trigger.jenkins_request_with_headers" + ) as mock_make_request, + ): mock_make_request.side_effect = [ {"body": "", "headers": {"Location": "http://what-a-strange.url/18"}}, {"body": '{"executable":{"number":"1"}}', "headers": {}}, diff --git a/tests/providers/jenkins/__init__.py b/providers/tests/jenkins/sensors/__init__.py similarity index 100% rename from tests/providers/jenkins/__init__.py rename to 
providers/tests/jenkins/sensors/__init__.py diff --git a/tests/providers/jenkins/sensors/test_jenkins.py b/providers/tests/jenkins/sensors/test_jenkins.py similarity index 100% rename from tests/providers/jenkins/sensors/test_jenkins.py rename to providers/tests/jenkins/sensors/test_jenkins.py diff --git a/tests/providers/jenkins/operators/__init__.py b/providers/tests/microsoft/__init__.py similarity index 100% rename from tests/providers/jenkins/operators/__init__.py rename to providers/tests/microsoft/__init__.py diff --git a/tests/providers/jenkins/sensors/__init__.py b/providers/tests/microsoft/azure/__init__.py similarity index 100% rename from tests/providers/jenkins/sensors/__init__.py rename to providers/tests/microsoft/azure/__init__.py diff --git a/tests/providers/microsoft/azure/base.py b/providers/tests/microsoft/azure/base.py similarity index 91% rename from tests/providers/microsoft/azure/base.py rename to providers/tests/microsoft/azure/base.py index cad6c1449fdca..98c0a59867ea4 100644 --- a/tests/providers/microsoft/azure/base.py +++ b/providers/tests/microsoft/azure/base.py @@ -26,7 +26,8 @@ from airflow.exceptions import TaskDeferred from airflow.providers.microsoft.azure.hooks.msgraph import KiotaRequestAdapterHook -from tests.providers.microsoft.conftest import get_airflow_connection, mock_context + +from providers.tests.microsoft.conftest import get_airflow_connection, mock_context if TYPE_CHECKING: from airflow.models import Operator @@ -39,9 +40,10 @@ def teardown_method(self, method): @contextmanager def patch_hook_and_request_adapter(self, response): - with patch( - "airflow.hooks.base.BaseHook.get_connection", side_effect=get_airflow_connection - ), patch.object(HttpxRequestAdapter, "get_http_response_message") as mock_get_http_response: + with ( + patch("airflow.hooks.base.BaseHook.get_connection", side_effect=get_airflow_connection), + patch.object(HttpxRequestAdapter, "get_http_response_message") as mock_get_http_response, + ): if isinstance(response, Exception): mock_get_http_response.side_effect = response else: diff --git a/tests/providers/microsoft/__init__.py b/providers/tests/microsoft/azure/fs/__init__.py similarity index 100% rename from tests/providers/microsoft/__init__.py rename to providers/tests/microsoft/azure/fs/__init__.py diff --git a/tests/providers/microsoft/azure/fs/test_adls.py b/providers/tests/microsoft/azure/fs/test_adls.py similarity index 100% rename from tests/providers/microsoft/azure/fs/test_adls.py rename to providers/tests/microsoft/azure/fs/test_adls.py diff --git a/tests/providers/microsoft/azure/__init__.py b/providers/tests/microsoft/azure/hooks/__init__.py similarity index 100% rename from tests/providers/microsoft/azure/__init__.py rename to providers/tests/microsoft/azure/hooks/__init__.py diff --git a/tests/providers/microsoft/azure/hooks/test_adx.py b/providers/tests/microsoft/azure/hooks/test_adx.py similarity index 100% rename from tests/providers/microsoft/azure/hooks/test_adx.py rename to providers/tests/microsoft/azure/hooks/test_adx.py diff --git a/tests/providers/microsoft/azure/hooks/test_asb.py b/providers/tests/microsoft/azure/hooks/test_asb.py similarity index 100% rename from tests/providers/microsoft/azure/hooks/test_asb.py rename to providers/tests/microsoft/azure/hooks/test_asb.py diff --git a/tests/providers/microsoft/azure/hooks/test_base_azure.py b/providers/tests/microsoft/azure/hooks/test_base_azure.py similarity index 100% rename from tests/providers/microsoft/azure/hooks/test_base_azure.py rename to 
providers/tests/microsoft/azure/hooks/test_base_azure.py diff --git a/tests/providers/microsoft/azure/hooks/test_batch.py b/providers/tests/microsoft/azure/hooks/test_batch.py similarity index 100% rename from tests/providers/microsoft/azure/hooks/test_batch.py rename to providers/tests/microsoft/azure/hooks/test_batch.py diff --git a/tests/providers/microsoft/azure/hooks/test_container_instance.py b/providers/tests/microsoft/azure/hooks/test_container_instance.py similarity index 95% rename from tests/providers/microsoft/azure/hooks/test_container_instance.py rename to providers/tests/microsoft/azure/hooks/test_container_instance.py index 09dfc167883b0..38b2c743d57f3 100644 --- a/tests/providers/microsoft/azure/hooks/test_container_instance.py +++ b/providers/tests/microsoft/azure/hooks/test_container_instance.py @@ -60,10 +60,13 @@ def setup_test_cases(self, create_mock_connection): ) self.resources = ResourceRequirements(requests=ResourceRequests(memory_in_gb="4", cpu="1")) self.hook = AzureContainerInstanceHook(azure_conn_id=mock_connection.conn_id) - with patch("azure.mgmt.containerinstance.ContainerInstanceManagementClient"), patch( - "azure.common.credentials.ServicePrincipalCredentials.__init__", - autospec=True, - return_value=None, + with ( + patch("azure.mgmt.containerinstance.ContainerInstanceManagementClient"), + patch( + "azure.common.credentials.ServicePrincipalCredentials.__init__", + autospec=True, + return_value=None, + ), ): yield diff --git a/tests/providers/microsoft/azure/hooks/test_container_registry.py b/providers/tests/microsoft/azure/hooks/test_container_registry.py similarity index 100% rename from tests/providers/microsoft/azure/hooks/test_container_registry.py rename to providers/tests/microsoft/azure/hooks/test_container_registry.py diff --git a/tests/providers/microsoft/azure/hooks/test_container_volume.py b/providers/tests/microsoft/azure/hooks/test_container_volume.py similarity index 100% rename from tests/providers/microsoft/azure/hooks/test_container_volume.py rename to providers/tests/microsoft/azure/hooks/test_container_volume.py diff --git a/tests/providers/microsoft/azure/hooks/test_cosmos.py b/providers/tests/microsoft/azure/hooks/test_cosmos.py similarity index 100% rename from tests/providers/microsoft/azure/hooks/test_cosmos.py rename to providers/tests/microsoft/azure/hooks/test_cosmos.py diff --git a/tests/providers/microsoft/azure/hooks/test_data_factory.py b/providers/tests/microsoft/azure/hooks/test_data_factory.py similarity index 100% rename from tests/providers/microsoft/azure/hooks/test_data_factory.py rename to providers/tests/microsoft/azure/hooks/test_data_factory.py diff --git a/tests/providers/microsoft/azure/hooks/test_data_lake.py b/providers/tests/microsoft/azure/hooks/test_data_lake.py similarity index 100% rename from tests/providers/microsoft/azure/hooks/test_data_lake.py rename to providers/tests/microsoft/azure/hooks/test_data_lake.py diff --git a/tests/providers/microsoft/azure/hooks/test_fileshare.py b/providers/tests/microsoft/azure/hooks/test_fileshare.py similarity index 100% rename from tests/providers/microsoft/azure/hooks/test_fileshare.py rename to providers/tests/microsoft/azure/hooks/test_fileshare.py diff --git a/tests/providers/microsoft/azure/hooks/test_msgraph.py b/providers/tests/microsoft/azure/hooks/test_msgraph.py similarity index 99% rename from tests/providers/microsoft/azure/hooks/test_msgraph.py rename to providers/tests/microsoft/azure/hooks/test_msgraph.py index 04e85525616bc..0ecad98548b51 100644 
--- a/tests/providers/microsoft/azure/hooks/test_msgraph.py +++ b/providers/tests/microsoft/azure/hooks/test_msgraph.py @@ -30,7 +30,8 @@ DefaultResponseHandler, KiotaRequestAdapterHook, ) -from tests.providers.microsoft.conftest import ( + +from providers.tests.microsoft.conftest import ( get_airflow_connection, load_file, load_json, diff --git a/tests/providers/microsoft/azure/hooks/test_powerbi.py b/providers/tests/microsoft/azure/hooks/test_powerbi.py similarity index 100% rename from tests/providers/microsoft/azure/hooks/test_powerbi.py rename to providers/tests/microsoft/azure/hooks/test_powerbi.py diff --git a/tests/providers/microsoft/azure/hooks/test_synapse.py b/providers/tests/microsoft/azure/hooks/test_synapse.py similarity index 100% rename from tests/providers/microsoft/azure/hooks/test_synapse.py rename to providers/tests/microsoft/azure/hooks/test_synapse.py diff --git a/tests/providers/microsoft/azure/hooks/test_synapse_pipeline.py b/providers/tests/microsoft/azure/hooks/test_synapse_pipeline.py similarity index 100% rename from tests/providers/microsoft/azure/hooks/test_synapse_pipeline.py rename to providers/tests/microsoft/azure/hooks/test_synapse_pipeline.py diff --git a/tests/providers/microsoft/azure/hooks/test_wasb.py b/providers/tests/microsoft/azure/hooks/test_wasb.py similarity index 100% rename from tests/providers/microsoft/azure/hooks/test_wasb.py rename to providers/tests/microsoft/azure/hooks/test_wasb.py diff --git a/tests/providers/microsoft/azure/resources/__init__.py b/providers/tests/microsoft/azure/log/__init__.py similarity index 100% rename from tests/providers/microsoft/azure/resources/__init__.py rename to providers/tests/microsoft/azure/log/__init__.py diff --git a/tests/providers/microsoft/azure/log/test_wasb_task_handler.py b/providers/tests/microsoft/azure/log/test_wasb_task_handler.py similarity index 98% rename from tests/providers/microsoft/azure/log/test_wasb_task_handler.py rename to providers/tests/microsoft/azure/log/test_wasb_task_handler.py index 7de68ec63a798..224961efe4336 100644 --- a/tests/providers/microsoft/azure/log/test_wasb_task_handler.py +++ b/providers/tests/microsoft/azure/log/test_wasb_task_handler.py @@ -29,8 +29,9 @@ from airflow.providers.microsoft.azure.log.wasb_task_handler import WasbTaskHandler from airflow.utils.state import TaskInstanceState from airflow.utils.timezone import datetime -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_runs + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs pytestmark = pytest.mark.db_test diff --git a/tests/providers/microsoft/azure/fs/__init__.py b/providers/tests/microsoft/azure/operators/__init__.py similarity index 100% rename from tests/providers/microsoft/azure/fs/__init__.py rename to providers/tests/microsoft/azure/operators/__init__.py diff --git a/tests/providers/microsoft/azure/operators/test_adls_create.py b/providers/tests/microsoft/azure/operators/test_adls_create.py similarity index 100% rename from tests/providers/microsoft/azure/operators/test_adls_create.py rename to providers/tests/microsoft/azure/operators/test_adls_create.py diff --git a/tests/providers/microsoft/azure/operators/test_adls_delete.py b/providers/tests/microsoft/azure/operators/test_adls_delete.py similarity index 100% rename from tests/providers/microsoft/azure/operators/test_adls_delete.py rename to providers/tests/microsoft/azure/operators/test_adls_delete.py 
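
The code hunks above (test_jdbc.py, the Jenkins job-trigger tests, microsoft/azure/base.py, test_container_instance.py) all apply the same mechanical rewrite: a chain of context managers that previously wrapped wherever a call's parentheses happened to permit is regrouped into a parenthesized with-statement, official syntax as of Python 3.10. A minimal before/after sketch, with a hypothetical Demo class standing in for the real hooks (not code from this patch):

    from unittest.mock import patch

    class Demo:
        driver_path = "real path"
        driver_class = "real class"

    # Old shape: managers chained on one logical line, broken wherever the
    # call parentheses allowed a wrap.
    with patch.object(Demo, "driver_path", "Blah driver path"), patch.object(
        Demo, "driver_class", "Blah driver class"
    ):
        assert Demo.driver_path == "Blah driver path"

    # New shape (Python 3.10+): the same managers grouped in parentheses,
    # one per line, with a trailing comma.
    with (
        patch.object(Demo, "driver_path", "Blah driver path"),
        patch.object(Demo, "driver_class", "Blah driver class"),
    ):
        assert Demo.driver_class == "Blah driver class"

Grouping the managers in parentheses keeps each one on its own line, so adding or removing a manager later produces a one-line diff instead of reflowing the whole statement.
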
diff --git a/tests/providers/microsoft/azure/operators/test_adls_list.py b/providers/tests/microsoft/azure/operators/test_adls_list.py similarity index 100% rename from tests/providers/microsoft/azure/operators/test_adls_list.py rename to providers/tests/microsoft/azure/operators/test_adls_list.py diff --git a/tests/providers/microsoft/azure/operators/test_adx.py b/providers/tests/microsoft/azure/operators/test_adx.py similarity index 100% rename from tests/providers/microsoft/azure/operators/test_adx.py rename to providers/tests/microsoft/azure/operators/test_adx.py diff --git a/tests/providers/microsoft/azure/operators/test_asb.py b/providers/tests/microsoft/azure/operators/test_asb.py similarity index 100% rename from tests/providers/microsoft/azure/operators/test_asb.py rename to providers/tests/microsoft/azure/operators/test_asb.py diff --git a/tests/providers/microsoft/azure/operators/test_batch.py b/providers/tests/microsoft/azure/operators/test_batch.py similarity index 100% rename from tests/providers/microsoft/azure/operators/test_batch.py rename to providers/tests/microsoft/azure/operators/test_batch.py diff --git a/tests/providers/microsoft/azure/operators/test_container_instances.py b/providers/tests/microsoft/azure/operators/test_container_instances.py similarity index 100% rename from tests/providers/microsoft/azure/operators/test_container_instances.py rename to providers/tests/microsoft/azure/operators/test_container_instances.py diff --git a/tests/providers/microsoft/azure/operators/test_cosmos.py b/providers/tests/microsoft/azure/operators/test_cosmos.py similarity index 100% rename from tests/providers/microsoft/azure/operators/test_cosmos.py rename to providers/tests/microsoft/azure/operators/test_cosmos.py diff --git a/tests/providers/microsoft/azure/operators/test_data_factory.py b/providers/tests/microsoft/azure/operators/test_data_factory.py similarity index 100% rename from tests/providers/microsoft/azure/operators/test_data_factory.py rename to providers/tests/microsoft/azure/operators/test_data_factory.py diff --git a/tests/providers/microsoft/azure/operators/test_msgraph.py b/providers/tests/microsoft/azure/operators/test_msgraph.py similarity index 98% rename from tests/providers/microsoft/azure/operators/test_msgraph.py rename to providers/tests/microsoft/azure/operators/test_msgraph.py index 754b653ccdaf0..372152fe979ee 100644 --- a/tests/providers/microsoft/azure/operators/test_msgraph.py +++ b/providers/tests/microsoft/azure/operators/test_msgraph.py @@ -25,8 +25,9 @@ from airflow.exceptions import AirflowException from airflow.providers.microsoft.azure.operators.msgraph import MSGraphAsyncOperator from airflow.triggers.base import TriggerEvent -from tests.providers.microsoft.azure.base import Base -from tests.providers.microsoft.conftest import ( + +from providers.tests.microsoft.azure.base import Base +from providers.tests.microsoft.conftest import ( load_file, load_json, mock_context, diff --git a/tests/providers/microsoft/azure/operators/test_powerbi.py b/providers/tests/microsoft/azure/operators/test_powerbi.py similarity index 97% rename from tests/providers/microsoft/azure/operators/test_powerbi.py rename to providers/tests/microsoft/azure/operators/test_powerbi.py index 35bb76f782ce3..a9171cff39a6e 100644 --- a/tests/providers/microsoft/azure/operators/test_powerbi.py +++ b/providers/tests/microsoft/azure/operators/test_powerbi.py @@ -30,8 +30,9 @@ from airflow.providers.microsoft.azure.operators.powerbi import PowerBIDatasetRefreshOperator from 
airflow.providers.microsoft.azure.triggers.powerbi import PowerBITrigger from airflow.utils import timezone -from tests.providers.microsoft.azure.base import Base -from tests.providers.microsoft.conftest import get_airflow_connection, mock_context + +from providers.tests.microsoft.azure.base import Base +from providers.tests.microsoft.conftest import get_airflow_connection, mock_context DEFAULT_CONNECTION_CLIENT_SECRET = "powerbi_conn_id" TASK_ID = "run_powerbi_operator" diff --git a/tests/providers/microsoft/azure/operators/test_synapse.py b/providers/tests/microsoft/azure/operators/test_synapse.py similarity index 100% rename from tests/providers/microsoft/azure/operators/test_synapse.py rename to providers/tests/microsoft/azure/operators/test_synapse.py diff --git a/tests/providers/microsoft/azure/operators/test_wasb_delete_blob.py b/providers/tests/microsoft/azure/operators/test_wasb_delete_blob.py similarity index 100% rename from tests/providers/microsoft/azure/operators/test_wasb_delete_blob.py rename to providers/tests/microsoft/azure/operators/test_wasb_delete_blob.py diff --git a/tests/providers/microsoft/azure/secrets/__init__.py b/providers/tests/microsoft/azure/resources/__init__.py similarity index 100% rename from tests/providers/microsoft/azure/secrets/__init__.py rename to providers/tests/microsoft/azure/resources/__init__.py diff --git a/tests/providers/microsoft/azure/resources/dummy.pdf b/providers/tests/microsoft/azure/resources/dummy.pdf similarity index 100% rename from tests/providers/microsoft/azure/resources/dummy.pdf rename to providers/tests/microsoft/azure/resources/dummy.pdf diff --git a/tests/providers/microsoft/azure/resources/next_users.json b/providers/tests/microsoft/azure/resources/next_users.json similarity index 100% rename from tests/providers/microsoft/azure/resources/next_users.json rename to providers/tests/microsoft/azure/resources/next_users.json diff --git a/tests/providers/microsoft/azure/resources/status.json b/providers/tests/microsoft/azure/resources/status.json similarity index 100% rename from tests/providers/microsoft/azure/resources/status.json rename to providers/tests/microsoft/azure/resources/status.json diff --git a/tests/providers/microsoft/azure/resources/users.json b/providers/tests/microsoft/azure/resources/users.json similarity index 100% rename from tests/providers/microsoft/azure/resources/users.json rename to providers/tests/microsoft/azure/resources/users.json diff --git a/tests/providers/microsoft/azure/transfers/__init__.py b/providers/tests/microsoft/azure/secrets/__init__.py similarity index 100% rename from tests/providers/microsoft/azure/transfers/__init__.py rename to providers/tests/microsoft/azure/secrets/__init__.py diff --git a/tests/providers/microsoft/azure/secrets/test_key_vault.py b/providers/tests/microsoft/azure/secrets/test_key_vault.py similarity index 100% rename from tests/providers/microsoft/azure/secrets/test_key_vault.py rename to providers/tests/microsoft/azure/secrets/test_key_vault.py diff --git a/tests/providers/microsoft/azure/hooks/__init__.py b/providers/tests/microsoft/azure/sensors/__init__.py similarity index 100% rename from tests/providers/microsoft/azure/hooks/__init__.py rename to providers/tests/microsoft/azure/sensors/__init__.py diff --git a/tests/providers/microsoft/azure/sensors/test_cosmos.py b/providers/tests/microsoft/azure/sensors/test_cosmos.py similarity index 100% rename from tests/providers/microsoft/azure/sensors/test_cosmos.py rename to 
providers/tests/microsoft/azure/sensors/test_cosmos.py diff --git a/tests/providers/microsoft/azure/sensors/test_data_factory.py b/providers/tests/microsoft/azure/sensors/test_data_factory.py similarity index 100% rename from tests/providers/microsoft/azure/sensors/test_data_factory.py rename to providers/tests/microsoft/azure/sensors/test_data_factory.py diff --git a/tests/providers/microsoft/azure/sensors/test_msgraph.py b/providers/tests/microsoft/azure/sensors/test_msgraph.py similarity index 95% rename from tests/providers/microsoft/azure/sensors/test_msgraph.py rename to providers/tests/microsoft/azure/sensors/test_msgraph.py index e257984affb1a..ba5ba35478861 100644 --- a/tests/providers/microsoft/azure/sensors/test_msgraph.py +++ b/providers/tests/microsoft/azure/sensors/test_msgraph.py @@ -20,8 +20,9 @@ from airflow.providers.microsoft.azure.sensors.msgraph import MSGraphSensor from airflow.triggers.base import TriggerEvent -from tests.providers.microsoft.azure.base import Base -from tests.providers.microsoft.conftest import load_json, mock_json_response + +from providers.tests.microsoft.azure.base import Base +from providers.tests.microsoft.conftest import load_json, mock_json_response class TestMSGraphSensor(Base): diff --git a/tests/providers/microsoft/azure/sensors/test_wasb.py b/providers/tests/microsoft/azure/sensors/test_wasb.py similarity index 100% rename from tests/providers/microsoft/azure/sensors/test_wasb.py rename to providers/tests/microsoft/azure/sensors/test_wasb.py diff --git a/tests/providers/microsoft/azure/test_utils.py b/providers/tests/microsoft/azure/test_utils.py similarity index 100% rename from tests/providers/microsoft/azure/test_utils.py rename to providers/tests/microsoft/azure/test_utils.py diff --git a/tests/providers/microsoft/azure/triggers/__init__.py b/providers/tests/microsoft/azure/transfers/__init__.py similarity index 100% rename from tests/providers/microsoft/azure/triggers/__init__.py rename to providers/tests/microsoft/azure/transfers/__init__.py diff --git a/tests/providers/microsoft/azure/transfers/test_local_to_adls.py b/providers/tests/microsoft/azure/transfers/test_local_to_adls.py similarity index 100% rename from tests/providers/microsoft/azure/transfers/test_local_to_adls.py rename to providers/tests/microsoft/azure/transfers/test_local_to_adls.py diff --git a/tests/providers/microsoft/azure/transfers/test_local_to_wasb.py b/providers/tests/microsoft/azure/transfers/test_local_to_wasb.py similarity index 100% rename from tests/providers/microsoft/azure/transfers/test_local_to_wasb.py rename to providers/tests/microsoft/azure/transfers/test_local_to_wasb.py diff --git a/tests/providers/microsoft/azure/transfers/test_oracle_to_azure_data_lake.py b/providers/tests/microsoft/azure/transfers/test_oracle_to_azure_data_lake.py similarity index 100% rename from tests/providers/microsoft/azure/transfers/test_oracle_to_azure_data_lake.py rename to providers/tests/microsoft/azure/transfers/test_oracle_to_azure_data_lake.py diff --git a/tests/providers/microsoft/azure/transfers/test_s3_to_wasb.py b/providers/tests/microsoft/azure/transfers/test_s3_to_wasb.py similarity index 100% rename from tests/providers/microsoft/azure/transfers/test_s3_to_wasb.py rename to providers/tests/microsoft/azure/transfers/test_s3_to_wasb.py diff --git a/tests/providers/microsoft/azure/transfers/test_sftp_to_wasb.py b/providers/tests/microsoft/azure/transfers/test_sftp_to_wasb.py similarity index 100% rename from 
tests/providers/microsoft/azure/transfers/test_sftp_to_wasb.py rename to providers/tests/microsoft/azure/transfers/test_sftp_to_wasb.py diff --git a/tests/providers/microsoft/mssql/operators/__init__.py b/providers/tests/microsoft/azure/triggers/__init__.py similarity index 100% rename from tests/providers/microsoft/mssql/operators/__init__.py rename to providers/tests/microsoft/azure/triggers/__init__.py diff --git a/tests/providers/microsoft/azure/triggers/test_data_factory.py b/providers/tests/microsoft/azure/triggers/test_data_factory.py similarity index 100% rename from tests/providers/microsoft/azure/triggers/test_data_factory.py rename to providers/tests/microsoft/azure/triggers/test_data_factory.py diff --git a/tests/providers/microsoft/azure/triggers/test_msgraph.py b/providers/tests/microsoft/azure/triggers/test_msgraph.py similarity index 98% rename from tests/providers/microsoft/azure/triggers/test_msgraph.py rename to providers/tests/microsoft/azure/triggers/test_msgraph.py index 23085563cf8f9..0784d8d83177c 100644 --- a/tests/providers/microsoft/azure/triggers/test_msgraph.py +++ b/providers/tests/microsoft/azure/triggers/test_msgraph.py @@ -31,8 +31,9 @@ ResponseSerializer, ) from airflow.triggers.base import TriggerEvent -from tests.providers.microsoft.azure.base import Base -from tests.providers.microsoft.conftest import ( + +from providers.tests.microsoft.azure.base import Base +from providers.tests.microsoft.conftest import ( get_airflow_connection, load_file, load_json, diff --git a/tests/providers/microsoft/azure/triggers/test_powerbi.py b/providers/tests/microsoft/azure/triggers/test_powerbi.py similarity index 99% rename from tests/providers/microsoft/azure/triggers/test_powerbi.py rename to providers/tests/microsoft/azure/triggers/test_powerbi.py index c3276e258b3da..8f5a3e945ffcd 100644 --- a/tests/providers/microsoft/azure/triggers/test_powerbi.py +++ b/providers/tests/microsoft/azure/triggers/test_powerbi.py @@ -25,7 +25,8 @@ from airflow.providers.microsoft.azure.hooks.powerbi import PowerBIDatasetRefreshStatus from airflow.providers.microsoft.azure.triggers.powerbi import PowerBITrigger from airflow.triggers.base import TriggerEvent -from tests.providers.microsoft.conftest import get_airflow_connection + +from providers.tests.microsoft.conftest import get_airflow_connection POWERBI_CONN_ID = "powerbi_default" DATASET_ID = "dataset_id" diff --git a/tests/providers/microsoft/azure/triggers/test_wasb.py b/providers/tests/microsoft/azure/triggers/test_wasb.py similarity index 100% rename from tests/providers/microsoft/azure/triggers/test_wasb.py rename to providers/tests/microsoft/azure/triggers/test_wasb.py diff --git a/tests/providers/microsoft/conftest.py b/providers/tests/microsoft/conftest.py similarity index 100% rename from tests/providers/microsoft/conftest.py rename to providers/tests/microsoft/conftest.py diff --git a/tests/providers/microsoft/azure/operators/__init__.py b/providers/tests/microsoft/mssql/__init__.py similarity index 100% rename from tests/providers/microsoft/azure/operators/__init__.py rename to providers/tests/microsoft/mssql/__init__.py diff --git a/tests/providers/microsoft/azure/sensors/__init__.py b/providers/tests/microsoft/mssql/hooks/__init__.py similarity index 100% rename from tests/providers/microsoft/azure/sensors/__init__.py rename to providers/tests/microsoft/mssql/hooks/__init__.py diff --git a/tests/providers/microsoft/mssql/hooks/test_mssql.py b/providers/tests/microsoft/mssql/hooks/test_mssql.py similarity index 99% 
rename from tests/providers/microsoft/mssql/hooks/test_mssql.py rename to providers/tests/microsoft/mssql/hooks/test_mssql.py index c629bf7b0d9a4..1b43bb787835b 100644 --- a/tests/providers/microsoft/mssql/hooks/test_mssql.py +++ b/providers/tests/microsoft/mssql/hooks/test_mssql.py @@ -23,7 +23,8 @@ import pytest from airflow.models import Connection -from tests.providers.microsoft.conftest import load_file + +from providers.tests.microsoft.conftest import load_file try: from airflow.providers.microsoft.mssql.hooks.mssql import MsSqlHook diff --git a/tests/providers/mysql/assets/__init__.py b/providers/tests/microsoft/mssql/operators/__init__.py similarity index 100% rename from tests/providers/mysql/assets/__init__.py rename to providers/tests/microsoft/mssql/operators/__init__.py diff --git a/tests/providers/microsoft/mssql/operators/test_mssql.py b/providers/tests/microsoft/mssql/operators/test_mssql.py similarity index 100% rename from tests/providers/microsoft/mssql/operators/test_mssql.py rename to providers/tests/microsoft/mssql/operators/test_mssql.py diff --git a/tests/providers/microsoft/mssql/__init__.py b/providers/tests/microsoft/mssql/resources/__init__.py similarity index 100% rename from tests/providers/microsoft/mssql/__init__.py rename to providers/tests/microsoft/mssql/resources/__init__.py diff --git a/tests/providers/microsoft/mssql/resources/replace.sql b/providers/tests/microsoft/mssql/resources/replace.sql similarity index 100% rename from tests/providers/microsoft/mssql/resources/replace.sql rename to providers/tests/microsoft/mssql/resources/replace.sql diff --git a/tests/providers/microsoft/mssql/hooks/__init__.py b/providers/tests/microsoft/psrp/__init__.py similarity index 100% rename from tests/providers/microsoft/mssql/hooks/__init__.py rename to providers/tests/microsoft/psrp/__init__.py diff --git a/tests/providers/microsoft/mssql/resources/__init__.py b/providers/tests/microsoft/psrp/hooks/__init__.py similarity index 100% rename from tests/providers/microsoft/mssql/resources/__init__.py rename to providers/tests/microsoft/psrp/hooks/__init__.py diff --git a/tests/providers/microsoft/psrp/hooks/test_psrp.py b/providers/tests/microsoft/psrp/hooks/test_psrp.py similarity index 96% rename from tests/providers/microsoft/psrp/hooks/test_psrp.py rename to providers/tests/microsoft/psrp/hooks/test_psrp.py index fe271f5c39e55..1f56e67a7a689 100644 --- a/tests/providers/microsoft/psrp/hooks/test_psrp.py +++ b/providers/tests/microsoft/psrp/hooks/test_psrp.py @@ -139,13 +139,16 @@ def test_invoke(self, runspace_pool, powershell, ws_man, logging_level): on_output_callback = Mock() - with PsrpHook( - CONNECTION_ID, - runspace_options=runspace_options, - wsman_options=wsman_options, - on_output_callback=on_output_callback, - **options, - ) as hook, patch.object(type(hook), "log") as logger: + with ( + PsrpHook( + CONNECTION_ID, + runspace_options=runspace_options, + wsman_options=wsman_options, + on_output_callback=on_output_callback, + **options, + ) as hook, + patch.object(type(hook), "log") as logger, + ): error_match = "Process had one or more errors" with pytest.raises(AirflowException, match=error_match): # noqa: PT012 error happen on context exit with hook.invoke() as ps: diff --git a/tests/providers/microsoft/psrp/__init__.py b/providers/tests/microsoft/psrp/operators/__init__.py similarity index 100% rename from tests/providers/microsoft/psrp/__init__.py rename to providers/tests/microsoft/psrp/operators/__init__.py diff --git 
a/tests/providers/microsoft/psrp/operators/test_psrp.py b/providers/tests/microsoft/psrp/operators/test_psrp.py similarity index 100% rename from tests/providers/microsoft/psrp/operators/test_psrp.py rename to providers/tests/microsoft/psrp/operators/test_psrp.py diff --git a/tests/providers/microsoft/psrp/hooks/__init__.py b/providers/tests/microsoft/winrm/__init__.py similarity index 100% rename from tests/providers/microsoft/psrp/hooks/__init__.py rename to providers/tests/microsoft/winrm/__init__.py diff --git a/tests/providers/microsoft/psrp/operators/__init__.py b/providers/tests/microsoft/winrm/hooks/__init__.py similarity index 100% rename from tests/providers/microsoft/psrp/operators/__init__.py rename to providers/tests/microsoft/winrm/hooks/__init__.py diff --git a/tests/providers/microsoft/winrm/hooks/test_winrm.py b/providers/tests/microsoft/winrm/hooks/test_winrm.py similarity index 100% rename from tests/providers/microsoft/winrm/hooks/test_winrm.py rename to providers/tests/microsoft/winrm/hooks/test_winrm.py diff --git a/tests/providers/microsoft/winrm/__init__.py b/providers/tests/microsoft/winrm/operators/__init__.py similarity index 100% rename from tests/providers/microsoft/winrm/__init__.py rename to providers/tests/microsoft/winrm/operators/__init__.py diff --git a/tests/providers/microsoft/winrm/operators/test_winrm.py b/providers/tests/microsoft/winrm/operators/test_winrm.py similarity index 100% rename from tests/providers/microsoft/winrm/operators/test_winrm.py rename to providers/tests/microsoft/winrm/operators/test_winrm.py diff --git a/tests/providers/microsoft/winrm/hooks/__init__.py b/providers/tests/mongo/__init__.py similarity index 100% rename from tests/providers/microsoft/winrm/hooks/__init__.py rename to providers/tests/mongo/__init__.py diff --git a/tests/providers/microsoft/winrm/operators/__init__.py b/providers/tests/mongo/hooks/__init__.py similarity index 100% rename from tests/providers/microsoft/winrm/operators/__init__.py rename to providers/tests/mongo/hooks/__init__.py diff --git a/tests/providers/mongo/hooks/test_mongo.py b/providers/tests/mongo/hooks/test_mongo.py similarity index 99% rename from tests/providers/mongo/hooks/test_mongo.py rename to providers/tests/mongo/hooks/test_mongo.py index 86b49535fa49b..78756b0cc4890 100644 --- a/tests/providers/mongo/hooks/test_mongo.py +++ b/providers/tests/mongo/hooks/test_mongo.py @@ -27,7 +27,8 @@ from airflow.exceptions import AirflowConfigException, AirflowProviderDeprecationWarning from airflow.models import Connection from airflow.providers.mongo.hooks.mongo import MongoHook -from tests.test_utils.compat import connection_as_json + +from dev.tests_common.test_utils.compat import connection_as_json pytestmark = pytest.mark.db_test diff --git a/tests/providers/mongo/__init__.py b/providers/tests/mongo/sensors/__init__.py similarity index 100% rename from tests/providers/mongo/__init__.py rename to providers/tests/mongo/sensors/__init__.py diff --git a/tests/providers/mongo/sensors/test_mongo.py b/providers/tests/mongo/sensors/test_mongo.py similarity index 100% rename from tests/providers/mongo/sensors/test_mongo.py rename to providers/tests/mongo/sensors/test_mongo.py diff --git a/tests/providers/mongo/hooks/__init__.py b/providers/tests/mysql/__init__.py similarity index 100% rename from tests/providers/mongo/hooks/__init__.py rename to providers/tests/mysql/__init__.py diff --git a/tests/providers/mysql/transfers/__init__.py b/providers/tests/mysql/assets/__init__.py similarity index 100% 
rename from tests/providers/mysql/transfers/__init__.py rename to providers/tests/mysql/assets/__init__.py diff --git a/tests/providers/mysql/assets/test_mysql.py b/providers/tests/mysql/assets/test_mysql.py similarity index 100% rename from tests/providers/mysql/assets/test_mysql.py rename to providers/tests/mysql/assets/test_mysql.py diff --git a/tests/providers/mongo/sensors/__init__.py b/providers/tests/mysql/hooks/__init__.py similarity index 100% rename from tests/providers/mongo/sensors/__init__.py rename to providers/tests/mysql/hooks/__init__.py diff --git a/tests/providers/mysql/hooks/test_mysql.py b/providers/tests/mysql/hooks/test_mysql.py similarity index 99% rename from tests/providers/mysql/hooks/test_mysql.py rename to providers/tests/mysql/hooks/test_mysql.py index 48fc62fe2c220..23748ae9cac19 100644 --- a/tests/providers/mysql/hooks/test_mysql.py +++ b/providers/tests/mysql/hooks/test_mysql.py @@ -37,7 +37,8 @@ from airflow.utils import timezone -from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces + +from dev.tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces SSL_DICT = {"cert": "/tmp/client-cert.pem", "ca": "/tmp/server-ca.pem", "key": "/tmp/client-key.pem"} diff --git a/tests/providers/mysql/hooks/test_mysql_connector_python.py b/providers/tests/mysql/hooks/test_mysql_connector_python.py similarity index 100% rename from tests/providers/mysql/hooks/test_mysql_connector_python.py rename to providers/tests/mysql/hooks/test_mysql_connector_python.py diff --git a/tests/providers/mysql/__init__.py b/providers/tests/mysql/operators/__init__.py similarity index 100% rename from tests/providers/mysql/__init__.py rename to providers/tests/mysql/operators/__init__.py diff --git a/tests/providers/mysql/operators/test_mysql.py b/providers/tests/mysql/operators/test_mysql.py similarity index 99% rename from tests/providers/mysql/operators/test_mysql.py rename to providers/tests/mysql/operators/test_mysql.py index 10a1fcc151a82..75f0aed2935f3 100644 --- a/tests/providers/mysql/operators/test_mysql.py +++ b/providers/tests/mysql/operators/test_mysql.py @@ -34,7 +34,8 @@ from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator from airflow.providers.mysql.hooks.mysql import MySqlHook from airflow.utils import timezone -from tests.providers.mysql.hooks.test_mysql import MySqlContext + +from providers.tests.mysql.hooks.test_mysql import MySqlContext DEFAULT_DATE = timezone.datetime(2015, 1, 1) DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat() diff --git a/tests/providers/odbc/__init__.py b/providers/tests/mysql/transfers/__init__.py similarity index 100% rename from tests/providers/odbc/__init__.py rename to providers/tests/mysql/transfers/__init__.py diff --git a/tests/providers/mysql/transfers/test_presto_to_mysql.py b/providers/tests/mysql/transfers/test_presto_to_mysql.py similarity index 100% rename from tests/providers/mysql/transfers/test_presto_to_mysql.py rename to providers/tests/mysql/transfers/test_presto_to_mysql.py diff --git a/tests/providers/mysql/transfers/test_s3_to_mysql.py b/providers/tests/mysql/transfers/test_s3_to_mysql.py similarity index 100% rename from tests/providers/mysql/transfers/test_s3_to_mysql.py rename to providers/tests/mysql/transfers/test_s3_to_mysql.py diff --git a/tests/providers/mysql/transfers/test_trino_to_mysql.py b/providers/tests/mysql/transfers/test_trino_to_mysql.py similarity index 100% rename from tests/providers/mysql/transfers/test_trino_to_mysql.py rename to 
providers/tests/mysql/transfers/test_trino_to_mysql.py diff --git a/tests/providers/mysql/transfers/test_vertica_to_mysql.py b/providers/tests/mysql/transfers/test_vertica_to_mysql.py similarity index 100% rename from tests/providers/mysql/transfers/test_vertica_to_mysql.py rename to providers/tests/mysql/transfers/test_vertica_to_mysql.py diff --git a/tests/providers/mysql/hooks/__init__.py b/providers/tests/neo4j/__init__.py similarity index 100% rename from tests/providers/mysql/hooks/__init__.py rename to providers/tests/neo4j/__init__.py diff --git a/tests/providers/mysql/operators/__init__.py b/providers/tests/neo4j/hooks/__init__.py similarity index 100% rename from tests/providers/mysql/operators/__init__.py rename to providers/tests/neo4j/hooks/__init__.py diff --git a/tests/providers/neo4j/hooks/test_neo4j.py b/providers/tests/neo4j/hooks/test_neo4j.py similarity index 100% rename from tests/providers/neo4j/hooks/test_neo4j.py rename to providers/tests/neo4j/hooks/test_neo4j.py diff --git a/tests/providers/neo4j/__init__.py b/providers/tests/neo4j/operators/__init__.py similarity index 100% rename from tests/providers/neo4j/__init__.py rename to providers/tests/neo4j/operators/__init__.py diff --git a/tests/providers/neo4j/operators/test_neo4j.py b/providers/tests/neo4j/operators/test_neo4j.py similarity index 100% rename from tests/providers/neo4j/operators/test_neo4j.py rename to providers/tests/neo4j/operators/test_neo4j.py diff --git a/tests/providers/odbc/hooks/__init__.py b/providers/tests/odbc/__init__.py similarity index 100% rename from tests/providers/odbc/hooks/__init__.py rename to providers/tests/odbc/__init__.py diff --git a/tests/providers/openai/__init__.py b/providers/tests/odbc/hooks/__init__.py similarity index 100% rename from tests/providers/openai/__init__.py rename to providers/tests/odbc/hooks/__init__.py diff --git a/tests/providers/odbc/hooks/test_odbc.py b/providers/tests/odbc/hooks/test_odbc.py similarity index 99% rename from tests/providers/odbc/hooks/test_odbc.py rename to providers/tests/odbc/hooks/test_odbc.py index bddd2ffd996b6..8f749aa4f765d 100644 --- a/tests/providers/odbc/hooks/test_odbc.py +++ b/providers/tests/odbc/hooks/test_odbc.py @@ -28,7 +28,8 @@ import pytest from airflow.providers.odbc.hooks.odbc import OdbcHook -from tests.providers.common.sql.test_utils import mock_hook + +from providers.tests.common.sql.test_utils import mock_hook @pytest.fixture diff --git a/tests/providers/openai/hooks/__init__.py b/providers/tests/openai/__init__.py similarity index 100% rename from tests/providers/openai/hooks/__init__.py rename to providers/tests/openai/__init__.py diff --git a/tests/providers/openai/operators/__init__.py b/providers/tests/openai/hooks/__init__.py similarity index 100% rename from tests/providers/openai/operators/__init__.py rename to providers/tests/openai/hooks/__init__.py diff --git a/tests/providers/openai/hooks/test_openai.py b/providers/tests/openai/hooks/test_openai.py similarity index 100% rename from tests/providers/openai/hooks/test_openai.py rename to providers/tests/openai/hooks/test_openai.py diff --git a/tests/providers/openai/triggers/__init__.py b/providers/tests/openai/operators/__init__.py similarity index 100% rename from tests/providers/openai/triggers/__init__.py rename to providers/tests/openai/operators/__init__.py diff --git a/tests/providers/openai/operators/test_openai.py b/providers/tests/openai/operators/test_openai.py similarity index 100% rename from 
tests/providers/openai/operators/test_openai.py rename to providers/tests/openai/operators/test_openai.py diff --git a/tests/providers/openai/test_exceptions.py b/providers/tests/openai/test_exceptions.py similarity index 100% rename from tests/providers/openai/test_exceptions.py rename to providers/tests/openai/test_exceptions.py diff --git a/tests/providers/openlineage/__init__.py b/providers/tests/openai/triggers/__init__.py similarity index 100% rename from tests/providers/openlineage/__init__.py rename to providers/tests/openai/triggers/__init__.py diff --git a/tests/providers/openai/triggers/test_openai.py b/providers/tests/openai/triggers/test_openai.py similarity index 100% rename from tests/providers/openai/triggers/test_openai.py rename to providers/tests/openai/triggers/test_openai.py diff --git a/tests/providers/neo4j/hooks/__init__.py b/providers/tests/openfaas/__init__.py similarity index 100% rename from tests/providers/neo4j/hooks/__init__.py rename to providers/tests/openfaas/__init__.py diff --git a/tests/providers/neo4j/operators/__init__.py b/providers/tests/openfaas/hooks/__init__.py similarity index 100% rename from tests/providers/neo4j/operators/__init__.py rename to providers/tests/openfaas/hooks/__init__.py diff --git a/tests/providers/openfaas/hooks/test_openfaas.py b/providers/tests/openfaas/hooks/test_openfaas.py similarity index 100% rename from tests/providers/openfaas/hooks/test_openfaas.py rename to providers/tests/openfaas/hooks/test_openfaas.py diff --git a/tests/providers/openlineage/extractors/__init__.py b/providers/tests/openlineage/__init__.py similarity index 100% rename from tests/providers/openlineage/extractors/__init__.py rename to providers/tests/openlineage/__init__.py diff --git a/tests/providers/openlineage/plugins/__init__.py b/providers/tests/openlineage/extractors/__init__.py similarity index 100% rename from tests/providers/openlineage/plugins/__init__.py rename to providers/tests/openlineage/extractors/__init__.py diff --git a/tests/providers/openlineage/extractors/test_base.py b/providers/tests/openlineage/extractors/test_base.py similarity index 99% rename from tests/providers/openlineage/extractors/test_base.py rename to providers/tests/openlineage/extractors/test_base.py index 88234d317472e..15c96ac675530 100644 --- a/tests/providers/openlineage/extractors/test_base.py +++ b/providers/tests/openlineage/extractors/test_base.py @@ -277,7 +277,7 @@ def test_extract_on_failure(task_state, is_airflow_2_10_or_higher, should_call_o @mock.patch("airflow.providers.openlineage.conf.custom_extractors") def test_extractors_env_var(custom_extractors): - custom_extractors.return_value = {"tests.providers.openlineage.extractors.test_base.ExampleExtractor"} + custom_extractors.return_value = {"providers.tests.openlineage.extractors.test_base.ExampleExtractor"} extractor = ExtractorManager().get_extractor_class(ExampleOperator(task_id="example")) assert extractor is ExampleExtractor diff --git a/tests/providers/openlineage/extractors/test_bash.py b/providers/tests/openlineage/extractors/test_bash.py similarity index 98% rename from tests/providers/openlineage/extractors/test_bash.py rename to providers/tests/openlineage/extractors/test_bash.py index fc862e5ee30b9..d4fcdf7af2b7a 100644 --- a/tests/providers/openlineage/extractors/test_bash.py +++ b/providers/tests/openlineage/extractors/test_bash.py @@ -27,7 +27,8 @@ from airflow import DAG from airflow.exceptions import AirflowProviderDeprecationWarning from 
airflow.providers.openlineage.extractors.bash import BashExtractor -from tests.test_utils.compat import BashOperator + +from dev.tests_common.test_utils.compat import BashOperator pytestmark = pytest.mark.db_test diff --git a/tests/providers/openlineage/extractors/test_manager.py b/providers/tests/openlineage/extractors/test_manager.py similarity index 99% rename from tests/providers/openlineage/extractors/test_manager.py rename to providers/tests/openlineage/extractors/test_manager.py index 601a456604843..6bbf303e32857 100644 --- a/tests/providers/openlineage/extractors/test_manager.py +++ b/providers/tests/openlineage/extractors/test_manager.py @@ -34,7 +34,8 @@ from airflow.providers.openlineage.extractors.manager import ExtractorManager from airflow.providers.openlineage.utils.utils import Asset from airflow.utils.state import State -from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS if TYPE_CHECKING: from airflow.utils.context import Context diff --git a/tests/providers/openlineage/extractors/test_python.py b/providers/tests/openlineage/extractors/test_python.py similarity index 98% rename from tests/providers/openlineage/extractors/test_python.py rename to providers/tests/openlineage/extractors/test_python.py index 44c5503b712d8..ef4fc0b861e7c 100644 --- a/tests/providers/openlineage/extractors/test_python.py +++ b/providers/tests/openlineage/extractors/test_python.py @@ -30,7 +30,8 @@ from airflow.exceptions import AirflowProviderDeprecationWarning from airflow.operators.python import PythonOperator from airflow.providers.openlineage.extractors.python import PythonExtractor -from tests.test_utils.compat import BashOperator + +from dev.tests_common.test_utils.compat import BashOperator pytestmark = pytest.mark.db_test diff --git a/tests/providers/openlineage/log_config.py b/providers/tests/openlineage/log_config.py similarity index 100% rename from tests/providers/openlineage/log_config.py rename to providers/tests/openlineage/log_config.py diff --git a/tests/providers/openlineage/plugins/openlineage_configs/__init__.py b/providers/tests/openlineage/plugins/__init__.py similarity index 100% rename from tests/providers/openlineage/plugins/openlineage_configs/__init__.py rename to providers/tests/openlineage/plugins/__init__.py diff --git a/tests/providers/openlineage/utils/__init__.py b/providers/tests/openlineage/plugins/openlineage_configs/__init__.py similarity index 100% rename from tests/providers/openlineage/utils/__init__.py rename to providers/tests/openlineage/plugins/openlineage_configs/__init__.py diff --git a/tests/providers/openlineage/plugins/openlineage_configs/http.yaml b/providers/tests/openlineage/plugins/openlineage_configs/http.yaml similarity index 100% rename from tests/providers/openlineage/plugins/openlineage_configs/http.yaml rename to providers/tests/openlineage/plugins/openlineage_configs/http.yaml diff --git a/tests/providers/openlineage/plugins/test_adapter.py b/providers/tests/openlineage/plugins/test_adapter.py similarity index 99% rename from tests/providers/openlineage/plugins/test_adapter.py rename to providers/tests/openlineage/plugins/test_adapter.py index b01fe46fdca13..88f2250c638ef 100644 --- a/tests/providers/openlineage/plugins/test_adapter.py +++ b/providers/tests/openlineage/plugins/test_adapter.py @@ -51,8 +51,9 @@ ) from airflow.providers.openlineage.utils.utils import get_airflow_job_facet from airflow.utils.task_group import TaskGroup -from tests.test_utils.compat 
import BashOperator -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.compat import BashOperator +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/providers/openlineage/plugins/test_execution.py b/providers/tests/openlineage/plugins/test_execution.py similarity index 98% rename from tests/providers/openlineage/plugins/test_execution.py rename to providers/tests/openlineage/plugins/test_execution.py index 8c0bdd55a1f96..c308047b1bc0d 100644 --- a/tests/providers/openlineage/plugins/test_execution.py +++ b/providers/tests/openlineage/plugins/test_execution.py @@ -36,8 +36,9 @@ from airflow.task.task_runner.standard_task_runner import StandardTaskRunner from airflow.utils import timezone from airflow.utils.state import State -from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/providers/openlineage/plugins/test_facets.py b/providers/tests/openlineage/plugins/test_facets.py similarity index 100% rename from tests/providers/openlineage/plugins/test_facets.py rename to providers/tests/openlineage/plugins/test_facets.py diff --git a/tests/providers/openlineage/plugins/test_listener.py b/providers/tests/openlineage/plugins/test_listener.py similarity index 99% rename from tests/providers/openlineage/plugins/test_listener.py rename to providers/tests/openlineage/plugins/test_listener.py index 57c0134f79d82..0c1651ccf0277 100644 --- a/tests/providers/openlineage/plugins/test_listener.py +++ b/providers/tests/openlineage/plugins/test_listener.py @@ -38,8 +38,9 @@ from airflow.providers.openlineage.plugins.listener import OpenLineageListener from airflow.providers.openlineage.utils.selective_enable import disable_lineage, enable_lineage from airflow.utils.state import DagRunState, State -from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/providers/openlineage/plugins/test_macros.py b/providers/tests/openlineage/plugins/test_macros.py similarity index 100% rename from tests/providers/openlineage/plugins/test_macros.py rename to providers/tests/openlineage/plugins/test_macros.py diff --git a/tests/providers/openlineage/plugins/test_openlineage.py b/providers/tests/openlineage/plugins/test_openlineage.py similarity index 97% rename from tests/providers/openlineage/plugins/test_openlineage.py rename to providers/tests/openlineage/plugins/test_openlineage.py index dcb8198ceccad..8685326a29432 100644 --- a/tests/providers/openlineage/plugins/test_openlineage.py +++ b/providers/tests/openlineage/plugins/test_openlineage.py @@ -23,8 +23,8 @@ import pytest -from tests.conftest import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES -from tests.test_utils.config import conf_vars +from dev.tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES +from dev.tests_common.test_utils.config import conf_vars @pytest.mark.skipif( diff --git a/tests/providers/openlineage/plugins/test_utils.py 
b/providers/tests/openlineage/plugins/test_utils.py similarity index 99% rename from tests/providers/openlineage/plugins/test_utils.py rename to providers/tests/openlineage/plugins/test_utils.py index 65874a5ecebf3..444839ccaef30 100644 --- a/tests/providers/openlineage/plugins/test_utils.py +++ b/providers/tests/openlineage/plugins/test_utils.py @@ -43,7 +43,8 @@ from airflow.utils import timezone from airflow.utils.log.secrets_masker import _secrets_masker from airflow.utils.state import State -from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS, BashOperator + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS, BashOperator if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/providers/openlineage/test_conf.py b/providers/tests/openlineage/test_conf.py similarity index 99% rename from tests/providers/openlineage/test_conf.py rename to providers/tests/openlineage/test_conf.py index f3c483d4ce142..7f78a6a4c2eea 100644 --- a/tests/providers/openlineage/test_conf.py +++ b/providers/tests/openlineage/test_conf.py @@ -38,7 +38,8 @@ selective_enable, transport, ) -from tests.test_utils.config import conf_vars, env_vars + +from dev.tests_common.test_utils.config import conf_vars, env_vars _CONFIG_SECTION = "openlineage" _VAR_CONFIG_PATH = "OPENLINEAGE_CONFIG" diff --git a/tests/providers/openlineage/test_sqlparser.py b/providers/tests/openlineage/test_sqlparser.py similarity index 100% rename from tests/providers/openlineage/test_sqlparser.py rename to providers/tests/openlineage/test_sqlparser.py diff --git a/tests/providers/opensearch/__init__.py b/providers/tests/openlineage/utils/__init__.py similarity index 100% rename from tests/providers/opensearch/__init__.py rename to providers/tests/openlineage/utils/__init__.py diff --git a/tests/providers/openlineage/utils/custom_facet_fixture.py b/providers/tests/openlineage/utils/custom_facet_fixture.py similarity index 100% rename from tests/providers/openlineage/utils/custom_facet_fixture.py rename to providers/tests/openlineage/utils/custom_facet_fixture.py diff --git a/tests/providers/openlineage/utils/test_selective_enable.py b/providers/tests/openlineage/utils/test_selective_enable.py similarity index 100% rename from tests/providers/openlineage/utils/test_selective_enable.py rename to providers/tests/openlineage/utils/test_selective_enable.py diff --git a/tests/providers/openlineage/utils/test_sql.py b/providers/tests/openlineage/utils/test_sql.py similarity index 100% rename from tests/providers/openlineage/utils/test_sql.py rename to providers/tests/openlineage/utils/test_sql.py diff --git a/tests/providers/openlineage/utils/test_utils.py b/providers/tests/openlineage/utils/test_utils.py similarity index 96% rename from tests/providers/openlineage/utils/test_utils.py rename to providers/tests/openlineage/utils/test_utils.py index 20eba76adeb18..6cf7904546aba 100644 --- a/tests/providers/openlineage/utils/test_utils.py +++ b/providers/tests/openlineage/utils/test_utils.py @@ -43,8 +43,9 @@ from airflow.serialization.serialized_objects import SerializedBaseOperator from airflow.utils.task_group import TaskGroup from airflow.utils.types import DagRunType -from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS, BashOperator -from tests.test_utils.mock_operators import MockOperator + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS, BashOperator +from dev.tests_common.test_utils.mock_operators import MockOperator 
BASH_OPERATOR_PATH = "airflow.providers.standard.operators.bash" if not AIRFLOW_V_2_10_PLUS: @@ -190,7 +191,7 @@ def test_get_fully_qualified_class_name_mapped_operator(): mapped = MockOperator.partial(task_id="task_2").expand(arg2=["a", "b", "c"]) assert isinstance(mapped, MappedOperator) mapped_op_path = get_fully_qualified_class_name(mapped) - assert mapped_op_path == "tests.test_utils.mock_operators.MockOperator" + assert mapped_op_path == "dev.tests_common.test_utils.mock_operators.MockOperator" def test_get_fully_qualified_class_name_bash_operator(): @@ -309,7 +310,7 @@ def sum_values(values: list[int]) -> int: "downstream_task_ids": [], }, "task": { - "operator": "tests.providers.openlineage.utils.test_utils.CustomOperatorForTest", + "operator": "providers.tests.openlineage.utils.test_utils.CustomOperatorForTest", "task_group": None, "emits_ol_events": True, "ui_color": CustomOperatorForTest.ui_color, @@ -337,7 +338,7 @@ def sum_values(values: list[int]) -> int: ], }, "task_1": { - "operator": "tests.providers.openlineage.utils.test_utils.CustomOperatorFromEmpty", + "operator": "providers.tests.openlineage.utils.test_utils.CustomOperatorFromEmpty", "task_group": None, "emits_ol_events": False, "ui_color": CustomOperatorFromEmpty.ui_color, @@ -406,7 +407,7 @@ def sum_values(values: list[int]) -> int: "emits_ol_events": True, "is_setup": False, "is_teardown": False, - "operator": "tests.providers.openlineage.utils.test_utils.TestMappedOperator", + "operator": "providers.tests.openlineage.utils.test_utils.TestMappedOperator", "task_group": None, "ui_color": "#fff", "ui_fgcolor": "#000", @@ -597,7 +598,7 @@ def test_get_user_provided_run_facets_with_no_function_definition(mock_custom_fa @patch( "airflow.providers.openlineage.conf.custom_run_facets", - return_value={"tests.providers.openlineage.utils.custom_facet_fixture.get_additional_test_facet"}, + return_value={"providers.tests.openlineage.utils.custom_facet_fixture.get_additional_test_facet"}, ) def test_get_user_provided_run_facets_with_function_definition(mock_custom_facet_funcs): sample_ti = TaskInstance( @@ -615,7 +616,7 @@ def test_get_user_provided_run_facets_with_function_definition(mock_custom_facet @patch( "airflow.providers.openlineage.conf.custom_run_facets", return_value={ - "tests.providers.openlineage.utils.custom_facet_fixture.get_additional_test_facet", + "providers.tests.openlineage.utils.custom_facet_fixture.get_additional_test_facet", }, ) def test_get_user_provided_run_facets_with_return_value_as_none(mock_custom_facet_funcs): @@ -635,9 +636,9 @@ def test_get_user_provided_run_facets_with_return_value_as_none(mock_custom_face "airflow.providers.openlineage.conf.custom_run_facets", return_value={ "invalid_function", - "tests.providers.openlineage.utils.custom_facet_fixture.get_additional_test_facet", - "tests.providers.openlineage.utils.custom_facet_fixture.return_type_is_not_dict", - "tests.providers.openlineage.utils.custom_facet_fixture.get_another_test_facet", + "providers.tests.openlineage.utils.custom_facet_fixture.get_additional_test_facet", + "providers.tests.openlineage.utils.custom_facet_fixture.return_type_is_not_dict", + "providers.tests.openlineage.utils.custom_facet_fixture.get_another_test_facet", }, ) def test_get_user_provided_run_facets_with_multiple_function_definition(mock_custom_facet_funcs): @@ -657,8 +658,8 @@ def test_get_user_provided_run_facets_with_multiple_function_definition(mock_cus @patch( "airflow.providers.openlineage.conf.custom_run_facets", return_value={ - 
"tests.providers.openlineage.utils.custom_facet_fixture.get_additional_test_facet", - "tests.providers.openlineage.utils.custom_facet_fixture.get_duplicate_test_facet_key", + "providers.tests.openlineage.utils.custom_facet_fixture.get_additional_test_facet", + "providers.tests.openlineage.utils.custom_facet_fixture.get_duplicate_test_facet_key", }, ) def test_get_user_provided_run_facets_with_duplicate_facet_keys(mock_custom_facet_funcs): @@ -691,7 +692,7 @@ def test_get_user_provided_run_facets_with_invalid_function_definition(mock_cust @patch( "airflow.providers.openlineage.conf.custom_run_facets", - return_value={"tests.providers.openlineage.utils.custom_facet_fixture.return_type_is_not_dict"}, + return_value={"providers.tests.openlineage.utils.custom_facet_fixture.return_type_is_not_dict"}, ) def test_get_user_provided_run_facets_with_wrong_return_type_function(mock_custom_facet_funcs): sample_ti = TaskInstance( @@ -706,7 +707,7 @@ def test_get_user_provided_run_facets_with_wrong_return_type_function(mock_custo @patch( "airflow.providers.openlineage.conf.custom_run_facets", - return_value={"tests.providers.openlineage.utils.custom_facet_fixture.get_custom_facet_throws_exception"}, + return_value={"providers.tests.openlineage.utils.custom_facet_fixture.get_custom_facet_throws_exception"}, ) def test_get_user_provided_run_facets_with_exception(mock_custom_facet_funcs): sample_ti = TaskInstance( diff --git a/tests/providers/opensearch/hooks/__init__.py b/providers/tests/opensearch/__init__.py similarity index 100% rename from tests/providers/opensearch/hooks/__init__.py rename to providers/tests/opensearch/__init__.py diff --git a/tests/providers/opensearch/conftest.py b/providers/tests/opensearch/conftest.py similarity index 100% rename from tests/providers/opensearch/conftest.py rename to providers/tests/opensearch/conftest.py diff --git a/tests/providers/opensearch/log/__init__.py b/providers/tests/opensearch/hooks/__init__.py similarity index 100% rename from tests/providers/opensearch/log/__init__.py rename to providers/tests/opensearch/hooks/__init__.py diff --git a/tests/providers/opensearch/hooks/test_opensearch.py b/providers/tests/opensearch/hooks/test_opensearch.py similarity index 100% rename from tests/providers/opensearch/hooks/test_opensearch.py rename to providers/tests/opensearch/hooks/test_opensearch.py diff --git a/tests/providers/opensearch/operators/__init__.py b/providers/tests/opensearch/log/__init__.py similarity index 100% rename from tests/providers/opensearch/operators/__init__.py rename to providers/tests/opensearch/log/__init__.py diff --git a/tests/providers/opensearch/log/test_os_json_formatter.py b/providers/tests/opensearch/log/test_os_json_formatter.py similarity index 100% rename from tests/providers/opensearch/log/test_os_json_formatter.py rename to providers/tests/opensearch/log/test_os_json_formatter.py diff --git a/tests/providers/opensearch/log/test_os_response.py b/providers/tests/opensearch/log/test_os_response.py similarity index 100% rename from tests/providers/opensearch/log/test_os_response.py rename to providers/tests/opensearch/log/test_os_response.py diff --git a/tests/providers/opensearch/log/test_os_task_handler.py b/providers/tests/opensearch/log/test_os_task_handler.py similarity index 99% rename from tests/providers/opensearch/log/test_os_task_handler.py rename to providers/tests/opensearch/log/test_os_task_handler.py index d23249ba9e6d9..1ffa0f5251191 100644 --- a/tests/providers/opensearch/log/test_os_task_handler.py +++ 
b/providers/tests/opensearch/log/test_os_task_handler.py @@ -43,9 +43,10 @@ from airflow.utils import timezone from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.timezone import datetime -from tests.providers.opensearch.conftest import MockClient -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_runs + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs +from providers.tests.opensearch.conftest import MockClient pytestmark = pytest.mark.db_test diff --git a/tests/providers/opsgenie/notifications/__init__.py b/providers/tests/opensearch/operators/__init__.py similarity index 100% rename from tests/providers/opsgenie/notifications/__init__.py rename to providers/tests/opensearch/operators/__init__.py diff --git a/tests/providers/opensearch/operators/test_opensearch.py b/providers/tests/opensearch/operators/test_opensearch.py similarity index 100% rename from tests/providers/opensearch/operators/test_opensearch.py rename to providers/tests/opensearch/operators/test_opensearch.py diff --git a/tests/providers/openfaas/__init__.py b/providers/tests/opsgenie/__init__.py similarity index 100% rename from tests/providers/openfaas/__init__.py rename to providers/tests/opsgenie/__init__.py diff --git a/tests/providers/openfaas/hooks/__init__.py b/providers/tests/opsgenie/hooks/__init__.py similarity index 100% rename from tests/providers/openfaas/hooks/__init__.py rename to providers/tests/opsgenie/hooks/__init__.py diff --git a/tests/providers/opsgenie/hooks/test_opsgenie.py b/providers/tests/opsgenie/hooks/test_opsgenie.py similarity index 100% rename from tests/providers/opsgenie/hooks/test_opsgenie.py rename to providers/tests/opsgenie/hooks/test_opsgenie.py diff --git a/tests/providers/opsgenie/typing/__init__.py b/providers/tests/opsgenie/notifications/__init__.py similarity index 100% rename from tests/providers/opsgenie/typing/__init__.py rename to providers/tests/opsgenie/notifications/__init__.py diff --git a/tests/providers/opsgenie/notifications/test_opsgenie.py b/providers/tests/opsgenie/notifications/test_opsgenie.py similarity index 100% rename from tests/providers/opsgenie/notifications/test_opsgenie.py rename to providers/tests/opsgenie/notifications/test_opsgenie.py diff --git a/tests/providers/opsgenie/__init__.py b/providers/tests/opsgenie/operators/__init__.py similarity index 100% rename from tests/providers/opsgenie/__init__.py rename to providers/tests/opsgenie/operators/__init__.py diff --git a/tests/providers/opsgenie/operators/test_opsgenie.py b/providers/tests/opsgenie/operators/test_opsgenie.py similarity index 100% rename from tests/providers/opsgenie/operators/test_opsgenie.py rename to providers/tests/opsgenie/operators/test_opsgenie.py diff --git a/tests/providers/oracle/operators/__init__.py b/providers/tests/opsgenie/typing/__init__.py similarity index 100% rename from tests/providers/oracle/operators/__init__.py rename to providers/tests/opsgenie/typing/__init__.py diff --git a/tests/providers/opsgenie/typing/test_opsgenie.py b/providers/tests/opsgenie/typing/test_opsgenie.py similarity index 100% rename from tests/providers/opsgenie/typing/test_opsgenie.py rename to providers/tests/opsgenie/typing/test_opsgenie.py diff --git a/tests/providers/opsgenie/hooks/__init__.py b/providers/tests/oracle/__init__.py similarity index 100% rename from tests/providers/opsgenie/hooks/__init__.py rename to 
providers/tests/oracle/__init__.py diff --git a/tests/providers/opsgenie/operators/__init__.py b/providers/tests/oracle/hooks/__init__.py similarity index 100% rename from tests/providers/opsgenie/operators/__init__.py rename to providers/tests/oracle/hooks/__init__.py diff --git a/tests/providers/oracle/hooks/test_oracle.py b/providers/tests/oracle/hooks/test_oracle.py similarity index 100% rename from tests/providers/oracle/hooks/test_oracle.py rename to providers/tests/oracle/hooks/test_oracle.py diff --git a/tests/providers/pagerduty/notifications/__init__.py b/providers/tests/oracle/operators/__init__.py similarity index 100% rename from tests/providers/pagerduty/notifications/__init__.py rename to providers/tests/oracle/operators/__init__.py diff --git a/tests/providers/oracle/operators/test_oracle.py b/providers/tests/oracle/operators/test_oracle.py similarity index 100% rename from tests/providers/oracle/operators/test_oracle.py rename to providers/tests/oracle/operators/test_oracle.py diff --git a/tests/providers/oracle/__init__.py b/providers/tests/oracle/transfers/__init__.py similarity index 100% rename from tests/providers/oracle/__init__.py rename to providers/tests/oracle/transfers/__init__.py diff --git a/tests/providers/oracle/transfers/test_oracle_to_oracle.py b/providers/tests/oracle/transfers/test_oracle_to_oracle.py similarity index 100% rename from tests/providers/oracle/transfers/test_oracle_to_oracle.py rename to providers/tests/oracle/transfers/test_oracle_to_oracle.py diff --git a/tests/providers/oracle/hooks/__init__.py b/providers/tests/pagerduty/__init__.py similarity index 100% rename from tests/providers/oracle/hooks/__init__.py rename to providers/tests/pagerduty/__init__.py diff --git a/tests/providers/oracle/transfers/__init__.py b/providers/tests/pagerduty/hooks/__init__.py similarity index 100% rename from tests/providers/oracle/transfers/__init__.py rename to providers/tests/pagerduty/hooks/__init__.py diff --git a/tests/providers/pagerduty/hooks/test_pagerduty.py b/providers/tests/pagerduty/hooks/test_pagerduty.py similarity index 100% rename from tests/providers/pagerduty/hooks/test_pagerduty.py rename to providers/tests/pagerduty/hooks/test_pagerduty.py diff --git a/tests/providers/pagerduty/hooks/test_pagerduty_events.py b/providers/tests/pagerduty/hooks/test_pagerduty_events.py similarity index 100% rename from tests/providers/pagerduty/hooks/test_pagerduty_events.py rename to providers/tests/pagerduty/hooks/test_pagerduty_events.py diff --git a/tests/providers/papermill/hooks/__init__.py b/providers/tests/pagerduty/notifications/__init__.py similarity index 100% rename from tests/providers/papermill/hooks/__init__.py rename to providers/tests/pagerduty/notifications/__init__.py diff --git a/tests/providers/pagerduty/notifications/test_pagerduty.py b/providers/tests/pagerduty/notifications/test_pagerduty.py similarity index 100% rename from tests/providers/pagerduty/notifications/test_pagerduty.py rename to providers/tests/pagerduty/notifications/test_pagerduty.py diff --git a/tests/providers/pagerduty/__init__.py b/providers/tests/papermill/__init__.py similarity index 100% rename from tests/providers/pagerduty/__init__.py rename to providers/tests/papermill/__init__.py diff --git a/tests/providers/pgvector/__init__.py b/providers/tests/papermill/hooks/__init__.py similarity index 100% rename from tests/providers/pgvector/__init__.py rename to providers/tests/papermill/hooks/__init__.py diff --git a/tests/providers/papermill/hooks/test_kernel.py 
b/providers/tests/papermill/hooks/test_kernel.py similarity index 100% rename from tests/providers/papermill/hooks/test_kernel.py rename to providers/tests/papermill/hooks/test_kernel.py diff --git a/tests/providers/pagerduty/hooks/__init__.py b/providers/tests/papermill/operators/__init__.py similarity index 100% rename from tests/providers/pagerduty/hooks/__init__.py rename to providers/tests/papermill/operators/__init__.py diff --git a/tests/providers/papermill/operators/test_papermill.py b/providers/tests/papermill/operators/test_papermill.py similarity index 100% rename from tests/providers/papermill/operators/test_papermill.py rename to providers/tests/papermill/operators/test_papermill.py diff --git a/tests/providers/pgvector/hooks/__init__.py b/providers/tests/pgvector/__init__.py similarity index 100% rename from tests/providers/pgvector/hooks/__init__.py rename to providers/tests/pgvector/__init__.py diff --git a/tests/providers/pgvector/operators/__init__.py b/providers/tests/pgvector/hooks/__init__.py similarity index 100% rename from tests/providers/pgvector/operators/__init__.py rename to providers/tests/pgvector/hooks/__init__.py diff --git a/tests/providers/pgvector/hooks/test_pgvector.py b/providers/tests/pgvector/hooks/test_pgvector.py similarity index 100% rename from tests/providers/pgvector/hooks/test_pgvector.py rename to providers/tests/pgvector/hooks/test_pgvector.py diff --git a/tests/providers/pinecone/__init__.py b/providers/tests/pgvector/operators/__init__.py similarity index 100% rename from tests/providers/pinecone/__init__.py rename to providers/tests/pgvector/operators/__init__.py diff --git a/tests/providers/pgvector/operators/test_pgvector.py b/providers/tests/pgvector/operators/test_pgvector.py similarity index 100% rename from tests/providers/pgvector/operators/test_pgvector.py rename to providers/tests/pgvector/operators/test_pgvector.py diff --git a/tests/providers/pinecone/hooks/__init__.py b/providers/tests/pinecone/__init__.py similarity index 100% rename from tests/providers/pinecone/hooks/__init__.py rename to providers/tests/pinecone/__init__.py diff --git a/tests/providers/pinecone/operators/__init__.py b/providers/tests/pinecone/hooks/__init__.py similarity index 100% rename from tests/providers/pinecone/operators/__init__.py rename to providers/tests/pinecone/hooks/__init__.py diff --git a/tests/providers/pinecone/hooks/test_pinecone.py b/providers/tests/pinecone/hooks/test_pinecone.py similarity index 100% rename from tests/providers/pinecone/hooks/test_pinecone.py rename to providers/tests/pinecone/hooks/test_pinecone.py diff --git a/tests/providers/postgres/assets/__init__.py b/providers/tests/pinecone/operators/__init__.py similarity index 100% rename from tests/providers/postgres/assets/__init__.py rename to providers/tests/pinecone/operators/__init__.py diff --git a/tests/providers/pinecone/operators/test_pinecone.py b/providers/tests/pinecone/operators/test_pinecone.py similarity index 100% rename from tests/providers/pinecone/operators/test_pinecone.py rename to providers/tests/pinecone/operators/test_pinecone.py diff --git a/tests/providers/papermill/__init__.py b/providers/tests/postgres/__init__.py similarity index 100% rename from tests/providers/papermill/__init__.py rename to providers/tests/postgres/__init__.py diff --git a/tests/providers/qdrant/__init__.py b/providers/tests/postgres/assets/__init__.py similarity index 100% rename from tests/providers/qdrant/__init__.py rename to providers/tests/postgres/assets/__init__.py 
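Note: the rename records and import hunks above all follow one convention. Each provider's tests move from tests/providers/<provider>/ to providers/tests/<provider>/, shared helpers are imported from dev.tests_common.test_utils instead of tests.test_utils (and RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES from dev.tests_common instead of tests.conftest), and fully qualified class paths embedded in test assertions change from tests.providers.... to providers.tests.... to match. The following is a minimal sketch of what a migrated test module looks like under the new layout; the import paths are taken verbatim from the hunks above, while the module path and test body are illustrative assumptions, not part of the patch:

    # providers/tests/redis/log/test_redis_task_handler.py -- illustrative skeleton;
    # the real module's tests exercise RedisTaskHandler, this body only shows the
    # new import layout and the conf_vars helper in use.
    from __future__ import annotations

    import pytest

    # Shared helpers now live under dev.tests_common rather than tests.test_utils.
    from dev.tests_common.test_utils.config import conf_vars

    pytestmark = pytest.mark.db_test


    def test_conf_override_scope():
        # conf_vars temporarily overrides (section, key) pairs in the Airflow
        # config for the duration of the block, which is how the migrated
        # tests above use it.
        with conf_vars({("logging", "remote_logging"): "True"}):
            from airflow.configuration import conf

            assert conf.getboolean("logging", "remote_logging")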
diff --git a/tests/providers/postgres/assets/test_postgres.py b/providers/tests/postgres/assets/test_postgres.py similarity index 100% rename from tests/providers/postgres/assets/test_postgres.py rename to providers/tests/postgres/assets/test_postgres.py diff --git a/tests/providers/papermill/operators/__init__.py b/providers/tests/postgres/hooks/__init__.py similarity index 100% rename from tests/providers/papermill/operators/__init__.py rename to providers/tests/postgres/hooks/__init__.py diff --git a/tests/providers/postgres/hooks/test_postgres.py b/providers/tests/postgres/hooks/test_postgres.py similarity index 100% rename from tests/providers/postgres/hooks/test_postgres.py rename to providers/tests/postgres/hooks/test_postgres.py diff --git a/tests/providers/postgres/__init__.py b/providers/tests/postgres/operators/__init__.py similarity index 100% rename from tests/providers/postgres/__init__.py rename to providers/tests/postgres/operators/__init__.py diff --git a/tests/providers/postgres/operators/test_postgres.py b/providers/tests/postgres/operators/test_postgres.py similarity index 100% rename from tests/providers/postgres/operators/test_postgres.py rename to providers/tests/postgres/operators/test_postgres.py diff --git a/tests/providers/postgres/hooks/__init__.py b/providers/tests/presto/__init__.py similarity index 100% rename from tests/providers/postgres/hooks/__init__.py rename to providers/tests/presto/__init__.py diff --git a/tests/providers/postgres/operators/__init__.py b/providers/tests/presto/hooks/__init__.py similarity index 100% rename from tests/providers/postgres/operators/__init__.py rename to providers/tests/presto/hooks/__init__.py diff --git a/tests/providers/presto/hooks/test_presto.py b/providers/tests/presto/hooks/test_presto.py similarity index 100% rename from tests/providers/presto/hooks/test_presto.py rename to providers/tests/presto/hooks/test_presto.py diff --git a/tests/providers/presto/__init__.py b/providers/tests/presto/transfers/__init__.py similarity index 100% rename from tests/providers/presto/__init__.py rename to providers/tests/presto/transfers/__init__.py diff --git a/tests/providers/presto/transfers/test_gcs_to_presto.py b/providers/tests/presto/transfers/test_gcs_to_presto.py similarity index 100% rename from tests/providers/presto/transfers/test_gcs_to_presto.py rename to providers/tests/presto/transfers/test_gcs_to_presto.py diff --git a/tests/providers/qdrant/hooks/__init__.py b/providers/tests/qdrant/__init__.py similarity index 100% rename from tests/providers/qdrant/hooks/__init__.py rename to providers/tests/qdrant/__init__.py diff --git a/tests/providers/qdrant/operators/__init__.py b/providers/tests/qdrant/hooks/__init__.py similarity index 100% rename from tests/providers/qdrant/operators/__init__.py rename to providers/tests/qdrant/hooks/__init__.py diff --git a/tests/providers/qdrant/hooks/test_qdrant.py b/providers/tests/qdrant/hooks/test_qdrant.py similarity index 100% rename from tests/providers/qdrant/hooks/test_qdrant.py rename to providers/tests/qdrant/hooks/test_qdrant.py diff --git a/tests/providers/salesforce/operators/__init__.py b/providers/tests/qdrant/operators/__init__.py similarity index 100% rename from tests/providers/salesforce/operators/__init__.py rename to providers/tests/qdrant/operators/__init__.py diff --git a/tests/providers/qdrant/operators/test_qdrant.py b/providers/tests/qdrant/operators/test_qdrant.py similarity index 100% rename from tests/providers/qdrant/operators/test_qdrant.py rename to 
providers/tests/qdrant/operators/test_qdrant.py diff --git a/tests/providers/presto/hooks/__init__.py b/providers/tests/redis/__init__.py similarity index 100% rename from tests/providers/presto/hooks/__init__.py rename to providers/tests/redis/__init__.py diff --git a/tests/providers/presto/transfers/__init__.py b/providers/tests/redis/hooks/__init__.py similarity index 100% rename from tests/providers/presto/transfers/__init__.py rename to providers/tests/redis/hooks/__init__.py diff --git a/tests/providers/redis/hooks/test_redis.py b/providers/tests/redis/hooks/test_redis.py similarity index 100% rename from tests/providers/redis/hooks/test_redis.py rename to providers/tests/redis/hooks/test_redis.py diff --git a/tests/providers/redis/__init__.py b/providers/tests/redis/log/__init__.py similarity index 100% rename from tests/providers/redis/__init__.py rename to providers/tests/redis/log/__init__.py diff --git a/tests/providers/redis/log/test_redis_task_handler.py b/providers/tests/redis/log/test_redis_task_handler.py similarity index 98% rename from tests/providers/redis/log/test_redis_task_handler.py rename to providers/tests/redis/log/test_redis_task_handler.py index f4ded2fa586ae..bc7345df9a53b 100644 --- a/tests/providers/redis/log/test_redis_task_handler.py +++ b/providers/tests/redis/log/test_redis_task_handler.py @@ -28,7 +28,8 @@ from airflow.utils.session import create_session from airflow.utils.state import State from airflow.utils.timezone import datetime -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/providers/redis/hooks/__init__.py b/providers/tests/redis/operators/__init__.py similarity index 100% rename from tests/providers/redis/hooks/__init__.py rename to providers/tests/redis/operators/__init__.py diff --git a/tests/providers/redis/operators/test_redis_publish.py b/providers/tests/redis/operators/test_redis_publish.py similarity index 100% rename from tests/providers/redis/operators/test_redis_publish.py rename to providers/tests/redis/operators/test_redis_publish.py diff --git a/tests/providers/redis/log/__init__.py b/providers/tests/redis/sensors/__init__.py similarity index 100% rename from tests/providers/redis/log/__init__.py rename to providers/tests/redis/sensors/__init__.py diff --git a/tests/providers/redis/sensors/test_redis_key.py b/providers/tests/redis/sensors/test_redis_key.py similarity index 100% rename from tests/providers/redis/sensors/test_redis_key.py rename to providers/tests/redis/sensors/test_redis_key.py diff --git a/tests/providers/redis/sensors/test_redis_pub_sub.py b/providers/tests/redis/sensors/test_redis_pub_sub.py similarity index 100% rename from tests/providers/redis/sensors/test_redis_pub_sub.py rename to providers/tests/redis/sensors/test_redis_pub_sub.py diff --git a/tests/providers/redis/operators/__init__.py b/providers/tests/salesforce/__init__.py similarity index 100% rename from tests/providers/redis/operators/__init__.py rename to providers/tests/salesforce/__init__.py diff --git a/tests/providers/redis/sensors/__init__.py b/providers/tests/salesforce/hooks/__init__.py similarity index 100% rename from tests/providers/redis/sensors/__init__.py rename to providers/tests/salesforce/hooks/__init__.py diff --git a/tests/providers/salesforce/hooks/test_salesforce.py b/providers/tests/salesforce/hooks/test_salesforce.py similarity index 100% rename from tests/providers/salesforce/hooks/test_salesforce.py rename to 
providers/tests/salesforce/hooks/test_salesforce.py diff --git a/tests/providers/sendgrid/__init__.py b/providers/tests/salesforce/operators/__init__.py similarity index 100% rename from tests/providers/sendgrid/__init__.py rename to providers/tests/salesforce/operators/__init__.py diff --git a/tests/providers/salesforce/operators/test_bulk.py b/providers/tests/salesforce/operators/test_bulk.py similarity index 100% rename from tests/providers/salesforce/operators/test_bulk.py rename to providers/tests/salesforce/operators/test_bulk.py diff --git a/tests/providers/salesforce/operators/test_salesforce_apex_rest.py b/providers/tests/salesforce/operators/test_salesforce_apex_rest.py similarity index 100% rename from tests/providers/salesforce/operators/test_salesforce_apex_rest.py rename to providers/tests/salesforce/operators/test_salesforce_apex_rest.py diff --git a/tests/providers/salesforce/__init__.py b/providers/tests/samba/__init__.py similarity index 100% rename from tests/providers/salesforce/__init__.py rename to providers/tests/samba/__init__.py diff --git a/tests/providers/salesforce/hooks/__init__.py b/providers/tests/samba/hooks/__init__.py similarity index 100% rename from tests/providers/salesforce/hooks/__init__.py rename to providers/tests/samba/hooks/__init__.py diff --git a/tests/providers/samba/hooks/test_samba.py b/providers/tests/samba/hooks/test_samba.py similarity index 100% rename from tests/providers/samba/hooks/test_samba.py rename to providers/tests/samba/hooks/test_samba.py diff --git a/tests/providers/samba/__init__.py b/providers/tests/samba/transfers/__init__.py similarity index 100% rename from tests/providers/samba/__init__.py rename to providers/tests/samba/transfers/__init__.py diff --git a/tests/providers/samba/transfers/test_gcs_to_samba.py b/providers/tests/samba/transfers/test_gcs_to_samba.py similarity index 100% rename from tests/providers/samba/transfers/test_gcs_to_samba.py rename to providers/tests/samba/transfers/test_gcs_to_samba.py diff --git a/tests/providers/samba/hooks/__init__.py b/providers/tests/segment/__init__.py similarity index 100% rename from tests/providers/samba/hooks/__init__.py rename to providers/tests/segment/__init__.py diff --git a/tests/providers/samba/transfers/__init__.py b/providers/tests/segment/hooks/__init__.py similarity index 100% rename from tests/providers/samba/transfers/__init__.py rename to providers/tests/segment/hooks/__init__.py diff --git a/tests/providers/segment/hooks/test_segment.py b/providers/tests/segment/hooks/test_segment.py similarity index 100% rename from tests/providers/segment/hooks/test_segment.py rename to providers/tests/segment/hooks/test_segment.py diff --git a/tests/providers/segment/__init__.py b/providers/tests/segment/operators/__init__.py similarity index 100% rename from tests/providers/segment/__init__.py rename to providers/tests/segment/operators/__init__.py diff --git a/tests/providers/segment/operators/test_segment_track_event.py b/providers/tests/segment/operators/test_segment_track_event.py similarity index 100% rename from tests/providers/segment/operators/test_segment_track_event.py rename to providers/tests/segment/operators/test_segment_track_event.py diff --git a/tests/providers/sendgrid/utils/__init__.py b/providers/tests/sendgrid/__init__.py similarity index 100% rename from tests/providers/sendgrid/utils/__init__.py rename to providers/tests/sendgrid/__init__.py diff --git a/tests/providers/sftp/__init__.py b/providers/tests/sendgrid/utils/__init__.py similarity index 
100% rename from tests/providers/sftp/__init__.py rename to providers/tests/sendgrid/utils/__init__.py diff --git a/tests/providers/sendgrid/utils/test_emailer.py b/providers/tests/sendgrid/utils/test_emailer.py similarity index 100% rename from tests/providers/sendgrid/utils/test_emailer.py rename to providers/tests/sendgrid/utils/test_emailer.py diff --git a/tests/providers/sftp/decorators/__init__.py b/providers/tests/sftp/__init__.py similarity index 100% rename from tests/providers/sftp/decorators/__init__.py rename to providers/tests/sftp/__init__.py diff --git a/tests/providers/sftp/decorators/sensors/__init__.py b/providers/tests/sftp/decorators/__init__.py similarity index 100% rename from tests/providers/sftp/decorators/sensors/__init__.py rename to providers/tests/sftp/decorators/__init__.py diff --git a/tests/providers/sftp/hooks/__init__.py b/providers/tests/sftp/decorators/sensors/__init__.py similarity index 100% rename from tests/providers/sftp/hooks/__init__.py rename to providers/tests/sftp/decorators/sensors/__init__.py diff --git a/tests/providers/sftp/decorators/sensors/test_sftp.py b/providers/tests/sftp/decorators/sensors/test_sftp.py similarity index 100% rename from tests/providers/sftp/decorators/sensors/test_sftp.py rename to providers/tests/sftp/decorators/sensors/test_sftp.py diff --git a/tests/providers/sftp/operators/__init__.py b/providers/tests/sftp/hooks/__init__.py similarity index 100% rename from tests/providers/sftp/operators/__init__.py rename to providers/tests/sftp/hooks/__init__.py diff --git a/tests/providers/sftp/hooks/test_sftp.py b/providers/tests/sftp/hooks/test_sftp.py similarity index 99% rename from tests/providers/sftp/hooks/test_sftp.py rename to providers/tests/sftp/hooks/test_sftp.py index 3305f9cb761a1..7a7a2991a7031 100644 --- a/tests/providers/sftp/hooks/test_sftp.py +++ b/providers/tests/sftp/hooks/test_sftp.py @@ -406,8 +406,9 @@ def test_deprecation_ftp_conn_id(self, mock_get_connection): def test_invalid_ssh_hook(self, mock_get_connection): connection = Connection(conn_id="sftp_default", login="root", host="localhost") mock_get_connection.return_value = connection - with pytest.raises(AirflowException, match="ssh_hook must be an instance of SSHHook"), pytest.warns( - AirflowProviderDeprecationWarning, match=r"Parameter `ssh_hook` is deprecated.*" + with ( + pytest.raises(AirflowException, match="ssh_hook must be an instance of SSHHook"), + pytest.warns(AirflowProviderDeprecationWarning, match=r"Parameter `ssh_hook` is deprecated.*"), ): SFTPHook(ssh_hook="invalid_hook") diff --git a/tests/providers/sftp/sensors/__init__.py b/providers/tests/sftp/operators/__init__.py similarity index 100% rename from tests/providers/sftp/sensors/__init__.py rename to providers/tests/sftp/operators/__init__.py diff --git a/tests/providers/sftp/operators/test_sftp.py b/providers/tests/sftp/operators/test_sftp.py similarity index 97% rename from tests/providers/sftp/operators/test_sftp.py rename to providers/tests/sftp/operators/test_sftp.py index a6835675da26f..e6c90c2c74f5d 100644 --- a/tests/providers/sftp/operators/test_sftp.py +++ b/providers/tests/sftp/operators/test_sftp.py @@ -35,7 +35,8 @@ from airflow.providers.ssh.operators.ssh import SSHOperator from airflow.utils import timezone from airflow.utils.timezone import datetime -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test @@ -151,8 +152,9 @@ def test_file_transfer_no_intermediate_dir_error_put(self, 
create_task_instance_ operation=SFTPOperation.PUT, create_intermediate_dirs=False, ) - with pytest.raises(AirflowException) as ctx, pytest.warns( - AirflowProviderDeprecationWarning, match="Parameter `ssh_hook` is deprecated..*" + with ( + pytest.raises(AirflowException) as ctx, + pytest.warns(AirflowProviderDeprecationWarning, match="Parameter `ssh_hook` is deprecated..*"), ): ti2.run() assert "No such file" in str(ctx.value) @@ -292,8 +294,11 @@ def test_file_transfer_no_intermediate_dir_error_get(self, dag_maker, create_rem for ti in dag_maker.create_dagrun(execution_date=timezone.utcnow()).task_instances: # This should raise an error with "No such file" as the directory # does not exist. - with pytest.raises(AirflowException) as ctx, pytest.warns( - AirflowProviderDeprecationWarning, match="Parameter `ssh_hook` is deprecated..*" + with ( + pytest.raises(AirflowException) as ctx, + pytest.warns( + AirflowProviderDeprecationWarning, match="Parameter `ssh_hook` is deprecated..*" + ), ): ti.run() assert "No such file" in str(ctx.value) @@ -376,8 +381,9 @@ def test_arg_checking(self): operation=SFTPOperation.PUT, dag=dag, ) - with contextlib.suppress(Exception), pytest.warns( - AirflowProviderDeprecationWarning, match="Parameter `ssh_hook` is deprecated..*" + with ( + contextlib.suppress(Exception), + pytest.warns(AirflowProviderDeprecationWarning, match="Parameter `ssh_hook` is deprecated..*"), ): task_3.execute(None) assert task_3.sftp_hook.ssh_conn_id == self.hook.ssh_conn_id diff --git a/tests/providers/sftp/triggers/__init__.py b/providers/tests/sftp/sensors/__init__.py similarity index 100% rename from tests/providers/sftp/triggers/__init__.py rename to providers/tests/sftp/sensors/__init__.py diff --git a/tests/providers/sftp/sensors/test_sftp.py b/providers/tests/sftp/sensors/test_sftp.py similarity index 100% rename from tests/providers/sftp/sensors/test_sftp.py rename to providers/tests/sftp/sensors/test_sftp.py diff --git a/tests/providers/slack/notifications/__init__.py b/providers/tests/sftp/triggers/__init__.py similarity index 100% rename from tests/providers/slack/notifications/__init__.py rename to providers/tests/sftp/triggers/__init__.py diff --git a/tests/providers/sftp/triggers/test_sftp.py b/providers/tests/sftp/triggers/test_sftp.py similarity index 100% rename from tests/providers/sftp/triggers/test_sftp.py rename to providers/tests/sftp/triggers/test_sftp.py diff --git a/tests/providers/segment/hooks/__init__.py b/providers/tests/singularity/__init__.py similarity index 100% rename from tests/providers/segment/hooks/__init__.py rename to providers/tests/singularity/__init__.py diff --git a/tests/providers/segment/operators/__init__.py b/providers/tests/singularity/operators/__init__.py similarity index 100% rename from tests/providers/segment/operators/__init__.py rename to providers/tests/singularity/operators/__init__.py diff --git a/tests/providers/singularity/operators/test_singularity.py b/providers/tests/singularity/operators/test_singularity.py similarity index 100% rename from tests/providers/singularity/operators/test_singularity.py rename to providers/tests/singularity/operators/test_singularity.py diff --git a/tests/providers/singularity/__init__.py b/providers/tests/slack/__init__.py similarity index 100% rename from tests/providers/singularity/__init__.py rename to providers/tests/slack/__init__.py diff --git a/tests/providers/singularity/operators/__init__.py b/providers/tests/slack/hooks/__init__.py similarity index 100% rename from 
tests/providers/singularity/operators/__init__.py rename to providers/tests/slack/hooks/__init__.py diff --git a/tests/providers/slack/hooks/test_slack.py b/providers/tests/slack/hooks/test_slack.py similarity index 100% rename from tests/providers/slack/hooks/test_slack.py rename to providers/tests/slack/hooks/test_slack.py diff --git a/tests/providers/slack/hooks/test_slack_webhook.py b/providers/tests/slack/hooks/test_slack_webhook.py similarity index 100% rename from tests/providers/slack/hooks/test_slack_webhook.py rename to providers/tests/slack/hooks/test_slack_webhook.py diff --git a/tests/providers/slack/utils/__init__.py b/providers/tests/slack/notifications/__init__.py similarity index 100% rename from tests/providers/slack/utils/__init__.py rename to providers/tests/slack/notifications/__init__.py diff --git a/tests/providers/slack/notifications/test_slack.py b/providers/tests/slack/notifications/test_slack.py similarity index 100% rename from tests/providers/slack/notifications/test_slack.py rename to providers/tests/slack/notifications/test_slack.py diff --git a/tests/providers/slack/notifications/test_slack_webhook.py b/providers/tests/slack/notifications/test_slack_webhook.py similarity index 100% rename from tests/providers/slack/notifications/test_slack_webhook.py rename to providers/tests/slack/notifications/test_slack_webhook.py diff --git a/tests/providers/slack/__init__.py b/providers/tests/slack/operators/__init__.py similarity index 100% rename from tests/providers/slack/__init__.py rename to providers/tests/slack/operators/__init__.py diff --git a/tests/providers/slack/operators/test_slack.py b/providers/tests/slack/operators/test_slack.py similarity index 98% rename from tests/providers/slack/operators/test_slack.py rename to providers/tests/slack/operators/test_slack.py index 603cef7835ded..f276ee43159e6 100644 --- a/tests/providers/slack/operators/test_slack.py +++ b/providers/tests/slack/operators/test_slack.py @@ -362,7 +362,8 @@ def test_partial_both_channel_parameters(self, channel, channels, dag_maker, ses with set_current_task_instance_session(session=session): warning_match = r"Argument `channel` is deprecated.*use `channels` instead" for ti in tis: - with pytest.warns(AirflowProviderDeprecationWarning, match=warning_match), pytest.raises( - ValueError, match="Cannot set both arguments" + with ( + pytest.warns(AirflowProviderDeprecationWarning, match=warning_match), + pytest.raises(ValueError, match="Cannot set both arguments"), ): ti.render_templates() diff --git a/tests/providers/slack/operators/test_slack_webhook.py b/providers/tests/slack/operators/test_slack_webhook.py similarity index 100% rename from tests/providers/slack/operators/test_slack_webhook.py rename to providers/tests/slack/operators/test_slack_webhook.py diff --git a/tests/providers/slack/hooks/__init__.py b/providers/tests/slack/transfers/__init__.py similarity index 100% rename from tests/providers/slack/hooks/__init__.py rename to providers/tests/slack/transfers/__init__.py diff --git a/tests/providers/slack/transfers/conftest.py b/providers/tests/slack/transfers/conftest.py similarity index 100% rename from tests/providers/slack/transfers/conftest.py rename to providers/tests/slack/transfers/conftest.py diff --git a/tests/providers/slack/transfers/test_base_sql_to_slack.py b/providers/tests/slack/transfers/test_base_sql_to_slack.py similarity index 100% rename from tests/providers/slack/transfers/test_base_sql_to_slack.py rename to 
providers/tests/slack/transfers/test_base_sql_to_slack.py diff --git a/tests/providers/slack/transfers/test_sql_to_slack.py b/providers/tests/slack/transfers/test_sql_to_slack.py similarity index 100% rename from tests/providers/slack/transfers/test_sql_to_slack.py rename to providers/tests/slack/transfers/test_sql_to_slack.py diff --git a/tests/providers/slack/transfers/test_sql_to_slack_webhook.py b/providers/tests/slack/transfers/test_sql_to_slack_webhook.py similarity index 96% rename from tests/providers/slack/transfers/test_sql_to_slack_webhook.py rename to providers/tests/slack/transfers/test_sql_to_slack_webhook.py index 2f6ef63bc687f..c56f895b7bc78 100644 --- a/tests/providers/slack/transfers/test_sql_to_slack_webhook.py +++ b/providers/tests/slack/transfers/test_sql_to_slack_webhook.py @@ -164,8 +164,9 @@ def test_conflicting_conn_id(self): "slack_message": "message: {{ ds }}, {{ xxxx }}", "sql": "sql {{ ds }}", } - with pytest.raises(ValueError, match="Conflicting Connection ids provided"), pytest.warns( - AirflowProviderDeprecationWarning, match="Parameter `slack_conn_id` is deprecated" + with ( + pytest.raises(ValueError, match="Conflicting Connection ids provided"), + pytest.warns(AirflowProviderDeprecationWarning, match="Parameter `slack_conn_id` is deprecated"), ): self._construct_operator(**operator_args, slack_webhook_conn_id="foo", slack_conn_id="bar") @@ -312,7 +313,8 @@ def test_partial_ambiguous_slack_connections(self, dag_maker, session): with set_current_task_instance_session(session=session): warning_match = r"Parameter `slack_conn_id` is deprecated" for ti in tis: - with pytest.warns(AirflowProviderDeprecationWarning, match=warning_match), pytest.raises( - ValueError, match="Conflicting Connection ids provided" + with ( + pytest.warns(AirflowProviderDeprecationWarning, match=warning_match), + pytest.raises(ValueError, match="Conflicting Connection ids provided"), ): ti.render_templates() diff --git a/tests/providers/smtp/notifications/__init__.py b/providers/tests/slack/utils/__init__.py similarity index 100% rename from tests/providers/smtp/notifications/__init__.py rename to providers/tests/slack/utils/__init__.py diff --git a/tests/providers/slack/utils/test_utils.py b/providers/tests/slack/utils/test_utils.py similarity index 100% rename from tests/providers/slack/utils/test_utils.py rename to providers/tests/slack/utils/test_utils.py diff --git a/tests/providers/slack/operators/__init__.py b/providers/tests/smtp/__init__.py similarity index 100% rename from tests/providers/slack/operators/__init__.py rename to providers/tests/smtp/__init__.py diff --git a/tests/providers/slack/transfers/__init__.py b/providers/tests/smtp/hooks/__init__.py similarity index 100% rename from tests/providers/slack/transfers/__init__.py rename to providers/tests/smtp/hooks/__init__.py diff --git a/tests/providers/smtp/hooks/test_smtp.py b/providers/tests/smtp/hooks/test_smtp.py similarity index 99% rename from tests/providers/smtp/hooks/test_smtp.py rename to providers/tests/smtp/hooks/test_smtp.py index 505fc303051ef..04a20e0ca6f0a 100644 --- a/tests/providers/smtp/hooks/test_smtp.py +++ b/providers/tests/smtp/hooks/test_smtp.py @@ -30,7 +30,8 @@ from airflow.providers.smtp.hooks.smtp import SmtpHook from airflow.utils import db from airflow.utils.session import create_session -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/providers/snowflake/__init__.py 
b/providers/tests/smtp/notifications/__init__.py similarity index 100% rename from tests/providers/snowflake/__init__.py rename to providers/tests/smtp/notifications/__init__.py diff --git a/tests/providers/smtp/notifications/test_smtp.py b/providers/tests/smtp/notifications/test_smtp.py similarity index 96% rename from tests/providers/smtp/notifications/test_smtp.py rename to providers/tests/smtp/notifications/test_smtp.py index 39b51e8e02ce7..75e1ebf530fae 100644 --- a/tests/providers/smtp/notifications/test_smtp.py +++ b/providers/tests/smtp/notifications/test_smtp.py @@ -31,8 +31,9 @@ send_smtp_notification, ) from airflow.utils import timezone -from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test @@ -182,9 +183,10 @@ def test_notifier_with_nondefault_conf_vars(self, mock_smtphook_hook, create_tas ti = create_task_instance(dag_id="dag", task_id="op", execution_date=timezone.datetime(2018, 1, 1)) context = {"dag": ti.dag_run.dag, "ti": ti} - with tempfile.NamedTemporaryFile(mode="wt", suffix=".txt") as f_subject, tempfile.NamedTemporaryFile( - mode="wt", suffix=".txt" - ) as f_content: + with ( + tempfile.NamedTemporaryFile(mode="wt", suffix=".txt") as f_subject, + tempfile.NamedTemporaryFile(mode="wt", suffix=".txt") as f_content, + ): f_subject.write("Task {{ ti.task_id }} failed") f_subject.flush() diff --git a/tests/providers/smtp/__init__.py b/providers/tests/smtp/operators/__init__.py similarity index 100% rename from tests/providers/smtp/__init__.py rename to providers/tests/smtp/operators/__init__.py diff --git a/tests/providers/smtp/operators/test_smtp.py b/providers/tests/smtp/operators/test_smtp.py similarity index 100% rename from tests/providers/smtp/operators/test_smtp.py rename to providers/tests/smtp/operators/test_smtp.py diff --git a/tests/providers/snowflake/decorators/__init__.py b/providers/tests/snowflake/__init__.py similarity index 100% rename from tests/providers/snowflake/decorators/__init__.py rename to providers/tests/snowflake/__init__.py diff --git a/tests/providers/snowflake/hooks/__init__.py b/providers/tests/snowflake/decorators/__init__.py similarity index 100% rename from tests/providers/snowflake/hooks/__init__.py rename to providers/tests/snowflake/decorators/__init__.py diff --git a/tests/providers/snowflake/decorators/test_snowpark.py b/providers/tests/snowflake/decorators/test_snowpark.py similarity index 100% rename from tests/providers/snowflake/decorators/test_snowpark.py rename to providers/tests/snowflake/decorators/test_snowpark.py diff --git a/tests/providers/snowflake/operators/__init__.py b/providers/tests/snowflake/hooks/__init__.py similarity index 100% rename from tests/providers/snowflake/operators/__init__.py rename to providers/tests/snowflake/hooks/__init__.py diff --git a/tests/providers/snowflake/hooks/test_snowflake.py b/providers/tests/snowflake/hooks/test_snowflake.py similarity index 93% rename from tests/providers/snowflake/hooks/test_snowflake.py rename to providers/tests/snowflake/hooks/test_snowflake.py index 9ef0c4d2a5717..b7c9382654be0 100644 --- a/tests/providers/snowflake/hooks/test_snowflake.py +++ b/providers/tests/snowflake/hooks/test_snowflake.py @@ -394,9 +394,10 @@ def test_get_conn_params_should_support_private_auth_with_unencrypted_key( with mock.patch.dict("os.environ", 
AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()): assert "private_key" in SnowflakeHook(snowflake_conn_id="test_conn")._get_conn_params connection_kwargs["password"] = _PASSWORD - with mock.patch.dict( - "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri() - ), pytest.raises(TypeError, match="Password was given but private key is not encrypted."): + with ( + mock.patch.dict("os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()), + pytest.raises(TypeError, match="Password was given but private key is not encrypted."), + ): SnowflakeHook(snowflake_conn_id="test_conn")._get_conn_params def test_get_conn_params_should_fail_on_invalid_key(self): @@ -412,9 +413,10 @@ def test_get_conn_params_should_fail_on_invalid_key(self): "private_key_file": "/dev/urandom", }, } - with mock.patch.dict( - "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri() - ), pytest.raises(ValueError, match="The private_key_file path points to an empty or invalid file."): + with ( + mock.patch.dict("os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()), + pytest.raises(ValueError, match="The private_key_file path points to an empty or invalid file."), + ): SnowflakeHook(snowflake_conn_id="test_conn").get_conn() def test_should_add_partner_info(self): @@ -428,18 +430,24 @@ def test_should_add_partner_info(self): ) def test_get_conn_should_call_connect(self): - with mock.patch.dict( - "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**BASE_CONNECTION_KWARGS).get_uri() - ), mock.patch("airflow.providers.snowflake.hooks.snowflake.connector") as mock_connector: + with ( + mock.patch.dict( + "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**BASE_CONNECTION_KWARGS).get_uri() + ), + mock.patch("airflow.providers.snowflake.hooks.snowflake.connector") as mock_connector, + ): hook = SnowflakeHook(snowflake_conn_id="test_conn") conn = hook.get_conn() mock_connector.connect.assert_called_once_with(**hook._get_conn_params) assert mock_connector.connect.return_value == conn def test_get_sqlalchemy_engine_should_support_pass_auth(self): - with mock.patch.dict( - "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**BASE_CONNECTION_KWARGS).get_uri() - ), mock.patch("airflow.providers.snowflake.hooks.snowflake.create_engine") as mock_create_engine: + with ( + mock.patch.dict( + "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**BASE_CONNECTION_KWARGS).get_uri() + ), + mock.patch("airflow.providers.snowflake.hooks.snowflake.create_engine") as mock_create_engine, + ): hook = SnowflakeHook(snowflake_conn_id="test_conn") conn = hook.get_sqlalchemy_engine() mock_create_engine.assert_called_once_with( @@ -452,9 +460,10 @@ def test_get_sqlalchemy_engine_should_support_insecure_mode(self): connection_kwargs = deepcopy(BASE_CONNECTION_KWARGS) connection_kwargs["extra"]["extra__snowflake__insecure_mode"] = "True" - with mock.patch.dict( - "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri() - ), mock.patch("airflow.providers.snowflake.hooks.snowflake.create_engine") as mock_create_engine: + with ( + mock.patch.dict("os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()), + mock.patch("airflow.providers.snowflake.hooks.snowflake.create_engine") as mock_create_engine, + ): hook = SnowflakeHook(snowflake_conn_id="test_conn") conn = hook.get_sqlalchemy_engine() mock_create_engine.assert_called_once_with( @@ -468,9 +477,10 @@ def test_get_sqlalchemy_engine_should_support_session_parameters(self): 
connection_kwargs = deepcopy(BASE_CONNECTION_KWARGS) connection_kwargs["extra"]["session_parameters"] = {"TEST_PARAM": "AA", "TEST_PARAM_B": 123} - with mock.patch.dict( - "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri() - ), mock.patch("airflow.providers.snowflake.hooks.snowflake.create_engine") as mock_create_engine: + with ( + mock.patch.dict("os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()), + mock.patch("airflow.providers.snowflake.hooks.snowflake.create_engine") as mock_create_engine, + ): hook = SnowflakeHook(snowflake_conn_id="test_conn") conn = hook.get_sqlalchemy_engine() mock_create_engine.assert_called_once_with( @@ -485,9 +495,10 @@ def test_get_sqlalchemy_engine_should_support_private_key_auth(self, non_encrypt connection_kwargs["password"] = "" connection_kwargs["extra"]["private_key_file"] = str(non_encrypted_temporary_private_key) - with mock.patch.dict( - "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri() - ), mock.patch("airflow.providers.snowflake.hooks.snowflake.create_engine") as mock_create_engine: + with ( + mock.patch.dict("os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()), + mock.patch("airflow.providers.snowflake.hooks.snowflake.create_engine") as mock_create_engine, + ): hook = SnowflakeHook(snowflake_conn_id="test_conn") conn = hook.get_sqlalchemy_engine() assert "private_key" in mock_create_engine.call_args.kwargs["connect_args"] diff --git a/tests/providers/snowflake/hooks/test_snowflake_sql_api.py b/providers/tests/snowflake/hooks/test_snowflake_sql_api.py similarity index 97% rename from tests/providers/snowflake/hooks/test_snowflake_sql_api.py rename to providers/tests/snowflake/hooks/test_snowflake_sql_api.py index df3f06db2f302..1247e3e031820 100644 --- a/tests/providers/snowflake/hooks/test_snowflake_sql_api.py +++ b/providers/tests/snowflake/hooks/test_snowflake_sql_api.py @@ -442,12 +442,15 @@ def test_get_private_key_raise_exception(self, encrypted_temporary_private_key: }, } hook = SnowflakeSqlApiHook(snowflake_conn_id="test_conn") - with unittest.mock.patch.dict( - "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri() - ), pytest.raises( - AirflowException, - match="The private_key_file and private_key_content extra fields are mutually " - "exclusive. Please remove one.", + with ( + unittest.mock.patch.dict( + "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri() + ), + pytest.raises( + AirflowException, + match="The private_key_file and private_key_content extra fields are mutually " + "exclusive. 
Please remove one.", + ), ): hook.get_private_key() @@ -504,9 +507,12 @@ def test_get_private_key_should_support_private_auth_with_unencrypted_key( hook.get_private_key() assert hook.private_key is not None connection_kwargs["password"] = _PASSWORD - with unittest.mock.patch.dict( - "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri() - ), pytest.raises(TypeError, match="Password was given but private key is not encrypted."): + with ( + unittest.mock.patch.dict( + "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri() + ), + pytest.raises(TypeError, match="Password was given but private key is not encrypted."), + ): SnowflakeSqlApiHook(snowflake_conn_id="test_conn").get_private_key() @pytest.mark.parametrize( diff --git a/tests/providers/snowflake/hooks/test_sql.py b/providers/tests/snowflake/hooks/test_sql.py similarity index 100% rename from tests/providers/snowflake/hooks/test_sql.py rename to providers/tests/snowflake/hooks/test_sql.py diff --git a/tests/providers/snowflake/transfers/__init__.py b/providers/tests/snowflake/operators/__init__.py similarity index 100% rename from tests/providers/snowflake/transfers/__init__.py rename to providers/tests/snowflake/operators/__init__.py diff --git a/tests/providers/snowflake/operators/test_snowflake.py b/providers/tests/snowflake/operators/test_snowflake.py similarity index 100% rename from tests/providers/snowflake/operators/test_snowflake.py rename to providers/tests/snowflake/operators/test_snowflake.py diff --git a/tests/providers/snowflake/operators/test_snowflake_sql.py b/providers/tests/snowflake/operators/test_snowflake_sql.py similarity index 100% rename from tests/providers/snowflake/operators/test_snowflake_sql.py rename to providers/tests/snowflake/operators/test_snowflake_sql.py diff --git a/tests/providers/snowflake/operators/test_snowpark.py b/providers/tests/snowflake/operators/test_snowpark.py similarity index 100% rename from tests/providers/snowflake/operators/test_snowpark.py rename to providers/tests/snowflake/operators/test_snowpark.py diff --git a/tests/providers/snowflake/triggers/__init__.py b/providers/tests/snowflake/transfers/__init__.py similarity index 100% rename from tests/providers/snowflake/triggers/__init__.py rename to providers/tests/snowflake/transfers/__init__.py diff --git a/tests/providers/snowflake/transfers/test_copy_into_snowflake.py b/providers/tests/snowflake/transfers/test_copy_into_snowflake.py similarity index 100% rename from tests/providers/snowflake/transfers/test_copy_into_snowflake.py rename to providers/tests/snowflake/transfers/test_copy_into_snowflake.py diff --git a/tests/providers/snowflake/utils/__init__.py b/providers/tests/snowflake/triggers/__init__.py similarity index 100% rename from tests/providers/snowflake/utils/__init__.py rename to providers/tests/snowflake/triggers/__init__.py diff --git a/tests/providers/snowflake/triggers/test_snowflake.py b/providers/tests/snowflake/triggers/test_snowflake.py similarity index 100% rename from tests/providers/snowflake/triggers/test_snowflake.py rename to providers/tests/snowflake/triggers/test_snowflake.py diff --git a/tests/providers/standard/__init__.py b/providers/tests/snowflake/utils/__init__.py similarity index 100% rename from tests/providers/standard/__init__.py rename to providers/tests/snowflake/utils/__init__.py diff --git a/tests/providers/snowflake/utils/test_common.py b/providers/tests/snowflake/utils/test_common.py similarity index 100% rename from 
diff --git a/tests/providers/snowflake/utils/test_common.py b/providers/tests/snowflake/utils/test_common.py
similarity index 100%
rename from tests/providers/snowflake/utils/test_common.py
rename to providers/tests/snowflake/utils/test_common.py
diff --git a/tests/providers/snowflake/utils/test_openlineage.py b/providers/tests/snowflake/utils/test_openlineage.py
similarity index 100%
rename from tests/providers/snowflake/utils/test_openlineage.py
rename to providers/tests/snowflake/utils/test_openlineage.py
diff --git a/tests/providers/snowflake/utils/test_snowpark.py b/providers/tests/snowflake/utils/test_snowpark.py
similarity index 100%
rename from tests/providers/snowflake/utils/test_snowpark.py
rename to providers/tests/snowflake/utils/test_snowpark.py
diff --git a/tests/providers/snowflake/utils/test_sql_api_generate_jwt.py b/providers/tests/snowflake/utils/test_sql_api_generate_jwt.py
similarity index 100%
rename from tests/providers/snowflake/utils/test_sql_api_generate_jwt.py
rename to providers/tests/snowflake/utils/test_sql_api_generate_jwt.py
diff --git a/tests/providers/smtp/hooks/__init__.py b/providers/tests/sqlite/__init__.py
similarity index 100%
rename from tests/providers/smtp/hooks/__init__.py
rename to providers/tests/sqlite/__init__.py
diff --git a/tests/providers/smtp/operators/__init__.py b/providers/tests/sqlite/hooks/__init__.py
similarity index 100%
rename from tests/providers/smtp/operators/__init__.py
rename to providers/tests/sqlite/hooks/__init__.py
diff --git a/tests/providers/sqlite/hooks/test_sqlite.py b/providers/tests/sqlite/hooks/test_sqlite.py
similarity index 100%
rename from tests/providers/sqlite/hooks/test_sqlite.py
rename to providers/tests/sqlite/hooks/test_sqlite.py
diff --git a/tests/providers/sqlite/__init__.py b/providers/tests/sqlite/operators/__init__.py
similarity index 100%
rename from tests/providers/sqlite/__init__.py
rename to providers/tests/sqlite/operators/__init__.py
diff --git a/tests/providers/sqlite/operators/test_sqlite.py b/providers/tests/sqlite/operators/test_sqlite.py
similarity index 100%
rename from tests/providers/sqlite/operators/test_sqlite.py
rename to providers/tests/sqlite/operators/test_sqlite.py
diff --git a/tests/providers/sqlite/hooks/__init__.py b/providers/tests/ssh/__init__.py
similarity index 100%
rename from tests/providers/sqlite/hooks/__init__.py
rename to providers/tests/ssh/__init__.py
diff --git a/tests/providers/sqlite/operators/__init__.py b/providers/tests/ssh/hooks/__init__.py
similarity index 100%
rename from tests/providers/sqlite/operators/__init__.py
rename to providers/tests/ssh/hooks/__init__.py
diff --git a/tests/providers/ssh/hooks/test_ssh.py b/providers/tests/ssh/hooks/test_ssh.py
similarity index 99%
rename from tests/providers/ssh/hooks/test_ssh.py
rename to providers/tests/ssh/hooks/test_ssh.py
index 71661e5b4ec6d..6b3e5dcdae116 100644
--- a/tests/providers/ssh/hooks/test_ssh.py
+++ b/providers/tests/ssh/hooks/test_ssh.py
@@ -534,8 +534,9 @@ def test_tunnel(self):
             args=["python", "-c", HELLO_SERVER_CMD],
             stdout=subprocess.PIPE,
         )
-        with subprocess.Popen(**subprocess_kwargs) as server_handle, hook.get_tunnel(
-            local_port=2135, remote_port=2134
+        with (
+            subprocess.Popen(**subprocess_kwargs) as server_handle,
+            hook.get_tunnel(local_port=2135, remote_port=2134),
         ):
             server_output = server_handle.stdout.read(5)
             assert b"ready" == server_output
diff --git a/tests/providers/ssh/__init__.py b/providers/tests/ssh/operators/__init__.py
similarity index 100%
rename from tests/providers/ssh/__init__.py
rename to providers/tests/ssh/operators/__init__.py
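A recurring fixture technique in the hook tests above: Airflow resolves a connection named `test_conn` from the `AIRFLOW_CONN_TEST_CONN` environment variable, so patching `os.environ` with a serialized connection URI injects test credentials without touching the metadata database. A sketch, with `connection_kwargs` standing in for the per-test values used above:

```python
from unittest import mock

from airflow.models.connection import Connection

# Sketch only: connection_kwargs mirrors the shape used in the tests above;
# the exact login/extra fields vary per test case.
connection_kwargs = {
    "conn_id": "test_conn",
    "conn_type": "snowflake",
    "login": "user",
    "password": "pw",
}

with mock.patch.dict(
    "os.environ", AIRFLOW_CONN_TEST_CONN=Connection(**connection_kwargs).get_uri()
):
    # Any hook created with snowflake_conn_id="test_conn" now resolves the
    # patched connection from the environment variable.
    ...
```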
diff --git a/tests/providers/ssh/operators/test_ssh.py b/providers/tests/ssh/operators/test_ssh.py
similarity index 99%
rename from tests/providers/ssh/operators/test_ssh.py
rename to providers/tests/ssh/operators/test_ssh.py
index 9b4ecbbb20dbf..b304a24882bb9 100644
--- a/tests/providers/ssh/operators/test_ssh.py
+++ b/providers/tests/ssh/operators/test_ssh.py
@@ -31,7 +31,8 @@
 from airflow.providers.ssh.operators.ssh import SSHOperator
 from airflow.utils.timezone import datetime
 from airflow.utils.types import NOTSET
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = pytest.mark.db_test
 
diff --git a/tests/providers/standard/operators/__init__.py b/providers/tests/standard/__init__.py
similarity index 100%
rename from tests/providers/standard/operators/__init__.py
rename to providers/tests/standard/__init__.py
diff --git a/tests/providers/standard/sensors/__init__.py b/providers/tests/standard/operators/__init__.py
similarity index 100%
rename from tests/providers/standard/sensors/__init__.py
rename to providers/tests/standard/operators/__init__.py
diff --git a/tests/providers/standard/operators/test_bash.py b/providers/tests/standard/operators/test_bash.py
similarity index 99%
rename from tests/providers/standard/operators/test_bash.py
rename to providers/tests/standard/operators/test_bash.py
index 2c29a0b96dc93..305651ee596ae 100644
--- a/tests/providers/standard/operators/test_bash.py
+++ b/providers/tests/standard/operators/test_bash.py
@@ -32,7 +32,8 @@
 from airflow.utils import timezone
 from airflow.utils.state import State
 from airflow.utils.types import DagRunType
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/providers/standard/operators/test_datetime.py b/providers/tests/standard/operators/test_datetime.py
similarity index 99%
rename from tests/providers/standard/operators/test_datetime.py
rename to providers/tests/standard/operators/test_datetime.py
index 530450a963bd3..4fbb0863d35ae 100644
--- a/tests/providers/standard/operators/test_datetime.py
+++ b/providers/tests/standard/operators/test_datetime.py
@@ -30,7 +30,8 @@
 from airflow.utils import timezone
 from airflow.utils.session import create_session
 from airflow.utils.state import State
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/providers/standard/operators/test_weekday.py b/providers/tests/standard/operators/test_weekday.py
similarity index 99%
rename from tests/providers/standard/operators/test_weekday.py
rename to providers/tests/standard/operators/test_weekday.py
index 7ad7415c366f7..6c20299d15e79 100644
--- a/tests/providers/standard/operators/test_weekday.py
+++ b/providers/tests/standard/operators/test_weekday.py
@@ -32,7 +32,8 @@
 from airflow.utils.session import create_session
 from airflow.utils.state import State
 from airflow.utils.weekday import WeekDay
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/providers/tableau/__init__.py b/providers/tests/standard/sensors/__init__.py
similarity index 100%
rename from tests/providers/tableau/__init__.py
rename to providers/tests/standard/sensors/__init__.py
diff --git a/tests/providers/standard/sensors/test_bash.py b/providers/tests/standard/sensors/test_bash.py
similarity index 100%
rename from tests/providers/standard/sensors/test_bash.py
rename to providers/tests/standard/sensors/test_bash.py
diff --git a/tests/providers/standard/sensors/test_date_time.py b/providers/tests/standard/sensors/test_date_time.py
similarity index 100%
rename from tests/providers/standard/sensors/test_date_time.py
rename to providers/tests/standard/sensors/test_date_time.py
diff --git a/tests/providers/standard/sensors/test_time.py b/providers/tests/standard/sensors/test_time.py
similarity index 100%
rename from tests/providers/standard/sensors/test_time.py
rename to providers/tests/standard/sensors/test_time.py
diff --git a/tests/providers/standard/sensors/test_time_delta.py b/providers/tests/standard/sensors/test_time_delta.py
similarity index 100%
rename from tests/providers/standard/sensors/test_time_delta.py
rename to providers/tests/standard/sensors/test_time_delta.py
diff --git a/tests/providers/standard/sensors/test_weekday.py b/providers/tests/standard/sensors/test_weekday.py
similarity index 99%
rename from tests/providers/standard/sensors/test_weekday.py
rename to providers/tests/standard/sensors/test_weekday.py
index d4a0f04c2290e..ef0091a9fdd3a 100644
--- a/tests/providers/standard/sensors/test_weekday.py
+++ b/providers/tests/standard/sensors/test_weekday.py
@@ -27,7 +27,8 @@
 from airflow.providers.standard.sensors.weekday import DayOfWeekSensor
 from airflow.utils.timezone import datetime
 from airflow.utils.weekday import WeekDay
-from tests.test_utils import db
+
+from dev.tests_common.test_utils import db
 
 pytestmark = pytest.mark.db_test
 
diff --git a/tests/providers/tableau/operators/__init__.py b/providers/tests/system/__init__.py
similarity index 100%
rename from tests/providers/tableau/operators/__init__.py
rename to providers/tests/system/__init__.py
diff --git a/tests/providers/tableau/sensors/__init__.py b/providers/tests/system/airbyte/__init__.py
similarity index 100%
rename from tests/providers/tableau/sensors/__init__.py
rename to providers/tests/system/airbyte/__init__.py
diff --git a/tests/system/providers/airbyte/example_airbyte_trigger_job.py b/providers/tests/system/airbyte/example_airbyte_trigger_job.py
similarity index 96%
rename from tests/system/providers/airbyte/example_airbyte_trigger_job.py
rename to providers/tests/system/airbyte/example_airbyte_trigger_job.py
index 4ff2b5e0d48cd..1dd3f01d72618 100644
--- a/tests/system/providers/airbyte/example_airbyte_trigger_job.py
+++ b/providers/tests/system/airbyte/example_airbyte_trigger_job.py
@@ -61,7 +61,7 @@
 # Task dependency created via `XComArgs`:
 # async_source_destination >> airbyte_sensor
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/providers/teradata/__init__.py b/providers/tests/system/alibaba/__init__.py
similarity index 100%
rename from tests/providers/teradata/__init__.py
rename to providers/tests/system/alibaba/__init__.py
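The system-test hunks from here on repeat one idiom: every example DAG module ends with `test_run = get_test_run(dag)`, and this refactor only moves the import from `tests.system.utils` to `dev.tests_common.test_utils.system_tests`. The tail of such a module looks roughly like this (a sketch with a hypothetical DAG; the DAG body is elided):

```python
from airflow.models.dag import DAG

with DAG(dag_id="example_sketch") as dag:  # hypothetical example DAG
    ...  # operators elided

# Kept at the end of the module so pytest can collect the example DAG as a
# test case (see tests/system/README.md#run_via_pytest).
from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

test_run = get_test_run(dag)
```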
diff --git a/tests/system/providers/alibaba/example_adb_spark_batch.py b/providers/tests/system/alibaba/example_adb_spark_batch.py
similarity index 93%
rename from tests/system/providers/alibaba/example_adb_spark_batch.py
rename to providers/tests/system/alibaba/example_adb_spark_batch.py
index 9f23693066aec..35e03a088871d 100644
--- a/tests/system/providers/alibaba/example_adb_spark_batch.py
+++ b/providers/tests/system/alibaba/example_adb_spark_batch.py
@@ -50,14 +50,14 @@
 
     spark_pi >> spark_lr
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 # [END howto_operator_adb_spark_batch]
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/alibaba/example_adb_spark_sql.py b/providers/tests/system/alibaba/example_adb_spark_sql.py
similarity index 93%
rename from tests/system/providers/alibaba/example_adb_spark_sql.py
rename to providers/tests/system/alibaba/example_adb_spark_sql.py
index fcfe4b896ccba..ac29330d451db 100644
--- a/tests/system/providers/alibaba/example_adb_spark_sql.py
+++ b/providers/tests/system/alibaba/example_adb_spark_sql.py
@@ -42,14 +42,14 @@
 
     show_databases >> show_tables
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 # [END howto_operator_adb_spark_sql]
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/alibaba/example_oss_bucket.py b/providers/tests/system/alibaba/example_oss_bucket.py
similarity index 93%
rename from tests/system/providers/alibaba/example_oss_bucket.py
rename to providers/tests/system/alibaba/example_oss_bucket.py
index 1e39d3eb45033..4870a3b577977 100644
--- a/tests/system/providers/alibaba/example_oss_bucket.py
+++ b/providers/tests/system/alibaba/example_oss_bucket.py
@@ -41,14 +41,14 @@
 
     create_bucket >> delete_bucket
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 # [END howto_operator_oss_bucket]
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
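The other recurring block is the `watcher` hookup, imported inside the `with DAG(...)` body. As the in-DAG comments say, a teardown task with a non-default trigger rule would otherwise let a failed run end in success, so every task is fanned into the watcher. The shape, as a runnable sketch with a hypothetical DAG:

```python
from airflow.models.dag import DAG

with DAG(dag_id="example_sketch") as dag:  # hypothetical example DAG
    ...  # setup >> test body >> teardown with a non-default trigger rule

    from dev.tests_common.test_utils.watcher import watcher

    # Fan every task into the watcher so any task failure fails the run,
    # even though the "tearDown" task's trigger rule would mask it.
    list(dag.tasks) >> watcher()
```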
diff --git a/tests/system/providers/alibaba/example_oss_object.py b/providers/tests/system/alibaba/example_oss_object.py
similarity index 94%
rename from tests/system/providers/alibaba/example_oss_object.py
rename to providers/tests/system/alibaba/example_oss_object.py
index 5b73fb1ba7a6a..7305c05bf8384 100644
--- a/tests/system/providers/alibaba/example_oss_object.py
+++ b/providers/tests/system/alibaba/example_oss_object.py
@@ -65,14 +65,14 @@
 
     create_object >> download_object >> delete_object >> delete_batch_object
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/CONTRIBUTING.md b/providers/tests/system/amazon/CONTRIBUTING.md
similarity index 99%
rename from tests/system/providers/amazon/CONTRIBUTING.md
rename to providers/tests/system/amazon/CONTRIBUTING.md
index b9aaaa66915c2..f12062aa7fb59 100644
--- a/tests/system/providers/amazon/CONTRIBUTING.md
+++ b/providers/tests/system/amazon/CONTRIBUTING.md
@@ -212,7 +212,7 @@ chain(
     task2,  # task2 has trigger rule "all done" defined
 )
 
-from tests.system.utils.watcher import watcher
+from dev.tests_common.test_utils.watcher import watcher
 
 # This test needs watcher in order to properly mark success/failure
 # when "tearDown" task with trigger rule is part of the DAG
diff --git a/tests/system/providers/amazon/README.md b/providers/tests/system/amazon/README.md
similarity index 100%
rename from tests/system/providers/amazon/README.md
rename to providers/tests/system/amazon/README.md
diff --git a/tests/providers/ssh/hooks/__init__.py b/providers/tests/system/amazon/__init__.py
similarity index 100%
rename from tests/providers/ssh/hooks/__init__.py
rename to providers/tests/system/amazon/__init__.py
diff --git a/tests/providers/ssh/operators/__init__.py b/providers/tests/system/amazon/aws/__init__.py
similarity index 100%
rename from tests/providers/ssh/operators/__init__.py
rename to providers/tests/system/amazon/aws/__init__.py
diff --git a/tests/system/providers/amazon/aws/example_appflow.py b/providers/tests/system/amazon/aws/example_appflow.py
similarity index 94%
rename from tests/system/providers/amazon/aws/example_appflow.py
rename to providers/tests/system/amazon/aws/example_appflow.py
index 5ba38533b0211..db9dd9a428947 100644
--- a/tests/system/providers/amazon/aws/example_appflow.py
+++ b/providers/tests/system/amazon/aws/example_appflow.py
@@ -28,7 +28,8 @@
     AppflowRunFullOperator,
 )
 from airflow.providers.standard.operators.bash import BashOperator
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder
 
 sys_test_context_task = SystemTestContextBuilder().build()
 
@@ -110,13 +111,13 @@
         should_be_skipped,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
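The AWS examples below also share a setup idiom: a `SystemTestContextBuilder` builds the initial task that fetches the test's external inputs, and these hunks only rewrite its import from `tests.system.providers.amazon.aws.utils` to `providers.tests.system.amazon.aws.utils`. Both variants that appear in the diffs, sketched here (the fetch mechanics live in that utils module; the comment about reading the context is an assumption from how the names are used):

```python
from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder

# Simplest form: only the generated environment ID is resolved.
sys_test_context_task = SystemTestContextBuilder().build()

# With an externally fetched variable, as in the Comprehend and Bedrock examples.
ROLE_ARN_KEY = "ROLE_ARN"
sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build()

# Assumed usage: downstream tasks read values out of the returned context,
# e.g. test_context[ENV_ID_KEY] / test_context[ROLE_ARN_KEY].
```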
diff --git a/tests/system/providers/amazon/aws/example_appflow_run.py b/providers/tests/system/amazon/aws/example_appflow_run.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_appflow_run.py
rename to providers/tests/system/amazon/aws/example_appflow_run.py
index d73a426127808..d1151d1dd5c8b 100644
--- a/tests/system/providers/amazon/aws/example_appflow_run.py
+++ b/providers/tests/system/amazon/aws/example_appflow_run.py
@@ -33,7 +33,8 @@
     S3DeleteBucketOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder
 
 sys_test_context_task = SystemTestContextBuilder().build()
 
@@ -192,13 +193,13 @@ def delete_flow(flow_name: str):
         delete_bucket,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_athena.py b/providers/tests/system/amazon/aws/example_athena.py
similarity index 96%
rename from tests/system/providers/amazon/aws/example_athena.py
rename to providers/tests/system/amazon/aws/example_athena.py
index 92c56c24ee7fc..2ee1c11ab7881 100644
--- a/tests/system/providers/amazon/aws/example_athena.py
+++ b/providers/tests/system/amazon/aws/example_athena.py
@@ -32,7 +32,8 @@
 )
 from airflow.providers.amazon.aws.sensors.athena import AthenaSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder
 
 sys_test_context_task = SystemTestContextBuilder().build()
 
@@ -173,14 +174,14 @@ def read_results_from_s3(bucket_name, query_execution_id):
         delete_s3_bucket,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_azure_blob_to_s3.py b/providers/tests/system/amazon/aws/example_azure_blob_to_s3.py
similarity index 93%
rename from tests/system/providers/amazon/aws/example_azure_blob_to_s3.py
rename to providers/tests/system/amazon/aws/example_azure_blob_to_s3.py
index 0109b8813063d..78a55a6a7f7dc 100644
--- a/tests/system/providers/amazon/aws/example_azure_blob_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_azure_blob_to_s3.py
@@ -23,7 +23,8 @@
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.transfers.azure_blob_to_s3 import AzureBlobStorageToS3Operator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder
 
 sys_test_context_task = SystemTestContextBuilder().build()
 
@@ -71,13 +72,13 @@
         delete_s3_bucket,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_batch.py b/providers/tests/system/amazon/aws/example_batch.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_batch.py
rename to providers/tests/system/amazon/aws/example_batch.py
index b33078407a296..da035050a77b4 100644
--- a/tests/system/providers/amazon/aws/example_batch.py
+++ b/providers/tests/system/amazon/aws/example_batch.py
@@ -32,7 +32,8 @@
     BatchSensor,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import (
+
+from providers.tests.system.amazon.aws.utils import (
     ENV_ID_KEY,
     SystemTestContextBuilder,
     prune_logs,
@@ -282,13 +283,13 @@ def delete_job_queue(job_queue_name):
         log_cleanup,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_bedrock.py b/providers/tests/system/amazon/aws/example_bedrock.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_bedrock.py
rename to providers/tests/system/amazon/aws/example_bedrock.py
index 3c015496f992e..8e67ee4d3880b 100644
--- a/tests/system/providers/amazon/aws/example_bedrock.py
+++ b/providers/tests/system/amazon/aws/example_bedrock.py
@@ -43,7 +43,8 @@
 )
 from airflow.utils.edgemodifier import Label
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder
 
 # Externally fetched variables:
 ROLE_ARN_KEY = "ROLE_ARN"
@@ -217,14 +218,14 @@ def run_or_skip():
         delete_bucket,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py b/providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py
similarity index 99%
rename from tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py
rename to providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py
index a1d1211da4c46..0d9eb278460f8 100644
--- a/tests/system/providers/amazon/aws/example_bedrock_retrieve_and_generate.py
+++ b/providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py
@@ -60,7 +60,8 @@
 from airflow.providers.amazon.aws.utils import get_botocore_version
 from airflow.utils.edgemodifier import Label
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder
 
 #######################################################################
 # NOTE:
@@ -593,14 +594,14 @@ def delete_opensearch_policies(collection_name: str):
         delete_bucket,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_cloudformation.py b/providers/tests/system/amazon/aws/example_cloudformation.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_cloudformation.py
rename to providers/tests/system/amazon/aws/example_cloudformation.py
index a376addd3489b..38827eebbdb5f 100644
--- a/tests/system/providers/amazon/aws/example_cloudformation.py
+++ b/providers/tests/system/amazon/aws/example_cloudformation.py
@@ -30,7 +30,8 @@
     CloudFormationDeleteStackSensor,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder
 
 sys_test_context_task = SystemTestContextBuilder().build()
 
@@ -107,14 +108,14 @@
         wait_for_stack_delete,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_comprehend.py b/providers/tests/system/amazon/aws/example_comprehend.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_comprehend.py
rename to providers/tests/system/amazon/aws/example_comprehend.py
index 58e34329b67f7..d8a1f9af88b6c 100644
--- a/tests/system/providers/amazon/aws/example_comprehend.py
+++ b/providers/tests/system/amazon/aws/example_comprehend.py
@@ -32,7 +32,8 @@
     ComprehendStartPiiEntitiesDetectionJobCompletedSensor,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder
 
 ROLE_ARN_KEY = "ROLE_ARN"
 sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build()
@@ -125,13 +126,13 @@ def pii_entities_detection_job_workflow():
         delete_bucket,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_comprehend_document_classifier.py b/providers/tests/system/amazon/aws/example_comprehend_document_classifier.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_comprehend_document_classifier.py
rename to providers/tests/system/amazon/aws/example_comprehend_document_classifier.py
index 08750da98141c..f5308265a83ca 100644
--- a/tests/system/providers/amazon/aws/example_comprehend_document_classifier.py
+++ b/providers/tests/system/amazon/aws/example_comprehend_document_classifier.py
@@ -37,7 +37,8 @@
 )
 from airflow.providers.amazon.aws.transfers.http_to_s3 import HttpToS3Operator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder
 
 ROLE_ARN_KEY = "ROLE_ARN"
 sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build()
@@ -230,13 +231,13 @@ def delete_connection(conn_id):
         delete_bucket,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_datasync.py b/providers/tests/system/amazon/aws/example_datasync.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_datasync.py
rename to providers/tests/system/amazon/aws/example_datasync.py
index 8f57d1c656068..a8363fe81b36e 100644
--- a/tests/system/providers/amazon/aws/example_datasync.py
+++ b/providers/tests/system/amazon/aws/example_datasync.py
@@ -26,7 +26,8 @@
 from airflow.providers.amazon.aws.operators.datasync import DataSyncOperator
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
 
 DAG_ID = "example_datasync"
 
@@ -234,13 +235,13 @@ def delete_locations(locations):
         delete_s3_bucket_destination,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_dms.py b/providers/tests/system/amazon/aws/example_dms.py
similarity index 98%
rename from tests/system/providers/amazon/aws/example_dms.py
rename to providers/tests/system/amazon/aws/example_dms.py
index 32506a0268bfb..b2e99daf6290f 100644
--- a/tests/system/providers/amazon/aws/example_dms.py
+++ b/providers/tests/system/amazon/aws/example_dms.py
@@ -46,8 +46,9 @@
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.sensors.dms import DmsTaskBaseSensor, DmsTaskCompletedSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
-from tests.system.providers.amazon.aws.utils.ec2 import get_default_vpc_id
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+from providers.tests.system.amazon.aws.utils.ec2 import get_default_vpc_id
 
 DAG_ID = "example_dms"
 ROLE_ARN_KEY = "ROLE_ARN"
@@ -428,13 +429,13 @@ def delete_security_group(security_group_id: str, security_group_name: str):
         delete_s3_bucket,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_dynamodb.py b/providers/tests/system/amazon/aws/example_dynamodb.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_dynamodb.py
rename to providers/tests/system/amazon/aws/example_dynamodb.py
index 6c3d770c9ed99..3d709c36b02ad 100644
--- a/tests/system/providers/amazon/aws/example_dynamodb.py
+++ b/providers/tests/system/amazon/aws/example_dynamodb.py
@@ -25,7 +25,8 @@
 from airflow.models.dag import DAG
 from airflow.providers.amazon.aws.sensors.dynamodb import DynamoDBValueSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
 
 # TODO: FIXME The argument types here seems somewhat tricky to fix
 # mypy: disable-error-code="arg-type"
@@ -116,14 +117,14 @@ def delete_table(table_name: str):
         delete_table,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
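The `chain(...)` fragments visible in these hunks, together with the amazon CONTRIBUTING.md hunk earlier, show how the examples order setup, body, and teardown: cleanup tasks run with `trigger_rule=TriggerRule.ALL_DONE` so they still execute after a failure, which is exactly why the watcher must report the real outcome. A runnable sketch with hypothetical DAG and task names:

```python
from airflow.models.baseoperator import chain
from airflow.models.dag import DAG
from airflow.operators.empty import EmptyOperator
from airflow.utils.trigger_rule import TriggerRule

with DAG(dag_id="example_teardown_sketch") as dag:  # hypothetical names throughout
    create_resource = EmptyOperator(task_id="create_resource")
    run_test_body = EmptyOperator(task_id="run_test_body")
    # ALL_DONE lets cleanup run even when the test body fails; without the
    # watcher hookup shown above, the run would then be marked successful.
    delete_resource = EmptyOperator(
        task_id="delete_resource", trigger_rule=TriggerRule.ALL_DONE
    )

    chain(create_resource, run_test_body, delete_resource)
```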
diff --git a/tests/system/providers/amazon/aws/example_dynamodb_to_s3.py b/providers/tests/system/amazon/aws/example_dynamodb_to_s3.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_dynamodb_to_s3.py
rename to providers/tests/system/amazon/aws/example_dynamodb_to_s3.py
index e22bc2080d5ec..86a5d76c9587f 100644
--- a/tests/system/providers/amazon/aws/example_dynamodb_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_dynamodb_to_s3.py
@@ -31,7 +31,8 @@
 from airflow.providers.amazon.aws.transfers.dynamodb_to_s3 import DynamoDBToS3Operator
 from airflow.utils.edgemodifier import Label
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
 
 log = logging.getLogger(__name__)
 
@@ -255,13 +256,13 @@ def skip_incremental_export(start_time: datetime, end_time: datetime):
         delete_table,
         delete_bucket,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_ec2.py b/providers/tests/system/amazon/aws/example_ec2.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_ec2.py
rename to providers/tests/system/amazon/aws/example_ec2.py
index 10b9c62338b90..e55adab775ffd 100644
--- a/tests/system/providers/amazon/aws/example_ec2.py
+++ b/providers/tests/system/amazon/aws/example_ec2.py
@@ -34,7 +34,8 @@
 )
 from airflow.providers.amazon.aws.sensors.ec2 import EC2InstanceStateSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
 
 DAG_ID = "example_ec2"
 
@@ -201,13 +202,13 @@ def parse_response(instance_ids: list):
         delete_key_pair(key_name),
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_ecs.py b/providers/tests/system/amazon/aws/example_ecs.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_ecs.py
rename to providers/tests/system/amazon/aws/example_ecs.py
index 98617dcbcb6d4..e1aad99b5e7cb 100644
--- a/tests/system/providers/amazon/aws/example_ecs.py
+++ b/providers/tests/system/amazon/aws/example_ecs.py
@@ -36,7 +36,8 @@
     EcsTaskDefinitionStateSensor,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
 
 DAG_ID = "example_ecs"
 
@@ -211,14 +212,14 @@ def clean_logs(group_name: str):
         clean_logs(log_group_name),
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_ecs_fargate.py b/providers/tests/system/amazon/aws/example_ecs_fargate.py
similarity index 96%
rename from tests/system/providers/amazon/aws/example_ecs_fargate.py
rename to providers/tests/system/amazon/aws/example_ecs_fargate.py
index 6a299c788ca19..ca592234aa77f 100644
--- a/tests/system/providers/amazon/aws/example_ecs_fargate.py
+++ b/providers/tests/system/amazon/aws/example_ecs_fargate.py
@@ -27,7 +27,8 @@
 from airflow.providers.amazon.aws.operators.ecs import EcsRunTaskOperator
 from airflow.providers.amazon.aws.sensors.ecs import EcsTaskStateSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
 
 DAG_ID = "example_ecs_fargate"
 
@@ -155,13 +156,13 @@ def delete_cluster(cluster_name: str) -> None:
         delete_cluster(cluster_name),
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_eks_templated.py b/providers/tests/system/amazon/aws/example_eks_templated.py
similarity index 96%
rename from tests/system/providers/amazon/aws/example_eks_templated.py
rename to providers/tests/system/amazon/aws/example_eks_templated.py
index 7ad8bc77c2ece..3b1ba5059725a 100644
--- a/tests/system/providers/amazon/aws/example_eks_templated.py
+++ b/providers/tests/system/amazon/aws/example_eks_templated.py
@@ -29,7 +29,8 @@
     EksPodOperator,
 )
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksNodegroupStateSensor
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder
 
 sys_test_context_task = SystemTestContextBuilder().build()
 
@@ -147,13 +148,13 @@
         await_delete_cluster,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_eks_with_fargate_in_one_step.py b/providers/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_eks_with_fargate_in_one_step.py
rename to providers/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py
index ae67a26588bdc..27ee2be0d728a 100644
--- a/tests/system/providers/amazon/aws/example_eks_with_fargate_in_one_step.py
+++ b/providers/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py
@@ -28,8 +28,9 @@
 )
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksFargateProfileStateSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
-from tests.system.providers.amazon.aws.utils.k8s import get_describe_pod_operator
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+from providers.tests.system.amazon.aws.utils.k8s import get_describe_pod_operator
 
 DAG_ID = "example_eks_with_fargate_in_one_step"
 
@@ -140,14 +141,14 @@
         await_delete_cluster,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_eks_with_fargate_profile.py b/providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_eks_with_fargate_profile.py
rename to providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py
index 9cce50b9bac46..34cbed1645fa5 100644
--- a/tests/system/providers/amazon/aws/example_eks_with_fargate_profile.py
+++ b/providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py
@@ -30,8 +30,9 @@
 )
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksFargateProfileStateSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
-from tests.system.providers.amazon.aws.utils.k8s import get_describe_pod_operator
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+from providers.tests.system.amazon.aws.utils.k8s import get_describe_pod_operator
 
 DAG_ID = "example_eks_with_fargate_profile"
 
@@ -174,13 +175,13 @@
         await_delete_cluster,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_eks_with_nodegroup_in_one_step.py b/providers/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_eks_with_nodegroup_in_one_step.py
rename to providers/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py
index 9bfce2cc1696f..11f843688524c 100644
--- a/tests/system/providers/amazon/aws/example_eks_with_nodegroup_in_one_step.py
+++ b/providers/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py
@@ -31,8 +31,9 @@
 )
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksNodegroupStateSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
-from tests.system.providers.amazon.aws.utils.k8s import get_describe_pod_operator
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+from providers.tests.system.amazon.aws.utils.k8s import get_describe_pod_operator
 
 DAG_ID = "example_eks_with_nodegroup_in_one_step"
 
@@ -154,14 +155,14 @@ def delete_launch_template(template_name: str):
         delete_launch_template(launch_template_name),
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_eks_with_nodegroups.py b/providers/tests/system/amazon/aws/example_eks_with_nodegroups.py
similarity index 96%
rename from tests/system/providers/amazon/aws/example_eks_with_nodegroups.py
rename to providers/tests/system/amazon/aws/example_eks_with_nodegroups.py
index a6681ba67f738..8356a8ee4375c 100644
--- a/tests/system/providers/amazon/aws/example_eks_with_nodegroups.py
+++ b/providers/tests/system/amazon/aws/example_eks_with_nodegroups.py
@@ -33,8 +33,9 @@
 )
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksNodegroupStateSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
-from tests.system.providers.amazon.aws.utils.k8s import get_describe_pod_operator
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+from providers.tests.system.amazon.aws.utils.k8s import get_describe_pod_operator
 
 DAG_ID = "example_eks_with_nodegroups"
 
@@ -197,14 +198,14 @@ def delete_launch_template(template_name: str):
         delete_launch_template(launch_template_name),
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_emr.py b/providers/tests/system/amazon/aws/example_emr.py
similarity index 97%
rename from tests/system/providers/amazon/aws/example_emr.py
rename to providers/tests/system/amazon/aws/example_emr.py
index 7d62180d07d9c..44c28630eb69e 100644
--- a/tests/system/providers/amazon/aws/example_emr.py
+++ b/providers/tests/system/amazon/aws/example_emr.py
@@ -36,7 +36,8 @@
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.sensors.emr import EmrJobFlowSensor, EmrStepSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
 
 DAG_ID = "example_emr"
 CONFIG_NAME = "EMR Runtime Role Security Configuration"
@@ -226,14 +227,14 @@ def get_step_id(step_ids: list):
         ]
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_emr_eks.py b/providers/tests/system/amazon/aws/example_emr_eks.py
similarity index 98%
rename from tests/system/providers/amazon/aws/example_emr_eks.py
rename to providers/tests/system/amazon/aws/example_emr_eks.py
index 8b182d42440bf..d95645fd306a6 100644
--- a/tests/system/providers/amazon/aws/example_emr_eks.py
+++ b/providers/tests/system/amazon/aws/example_emr_eks.py
@@ -36,7 +36,8 @@
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksNodegroupStateSensor
 from airflow.providers.amazon.aws.sensors.emr import EmrContainerSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
 
 DAG_ID = "example_emr_eks"
 
@@ -319,13 +320,13 @@ def delete_virtual_cluster(virtual_cluster_id):
         delete_bucket,
    )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_emr_notebook_execution.py b/providers/tests/system/amazon/aws/example_emr_notebook_execution.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_emr_notebook_execution.py
rename to providers/tests/system/amazon/aws/example_emr_notebook_execution.py
index 86fc3055a1624..48d2b14c96573 100644
--- a/tests/system/providers/amazon/aws/example_emr_notebook_execution.py
+++ b/providers/tests/system/amazon/aws/example_emr_notebook_execution.py
@@ -27,7 +27,8 @@
     EmrStopNotebookExecutionOperator,
 )
 from airflow.providers.amazon.aws.sensors.emr import EmrNotebookExecutionSensor
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
 
 DAG_ID = "example_emr_notebook"
 # Externally fetched variables:
@@ -111,13 +112,13 @@
         wait_for_execution_finish,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_emr_serverless.py b/providers/tests/system/amazon/aws/example_emr_serverless.py
similarity index 96%
rename from tests/system/providers/amazon/aws/example_emr_serverless.py
rename to providers/tests/system/amazon/aws/example_emr_serverless.py
index 1bca31de5f39e..4901fa6736059 100644
--- a/tests/system/providers/amazon/aws/example_emr_serverless.py
+++ b/providers/tests/system/amazon/aws/example_emr_serverless.py
@@ -32,7 +32,8 @@
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.sensors.emr import EmrServerlessApplicationSensor, EmrServerlessJobSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
 
 DAG_ID = "example_emr_serverless"
 
@@ -154,13 +155,13 @@
         delete_s3_bucket,
     )
 
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_eventbridge.py b/providers/tests/system/amazon/aws/example_eventbridge.py
similarity index 95%
rename from tests/system/providers/amazon/aws/example_eventbridge.py
rename to providers/tests/system/amazon/aws/example_eventbridge.py
index 9b3bb922e1669..5470bd0ca70cc 100644
--- a/tests/system/providers/amazon/aws/example_eventbridge.py
+++ b/providers/tests/system/amazon/aws/example_eventbridge.py
@@ -26,7 +26,8 @@
     EventBridgePutEventsOperator,
     EventBridgePutRuleOperator,
 )
-from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
+
+from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder
 
 DAG_ID = "example_eventbridge"
 ENTRIES = [
@@ -80,7 +81,7 @@
 
     chain(test_context, put_events, put_rule, enable_rule, disable_rule)
 
-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/amazon/aws/example_ftp_to_s3.py b/providers/tests/system/amazon/aws/example_ftp_to_s3.py
similarity index 92%
rename from tests/system/providers/amazon/aws/example_ftp_to_s3.py
rename to providers/tests/system/amazon/aws/example_ftp_to_s3.py
index ef62bdc06a733..98a37e197f8fe 100644
--- a/tests/system/providers/amazon/aws/example_ftp_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_ftp_to_s3.py
@@ -23,7 +23,8 @@
 from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator
 from airflow.providers.amazon.aws.transfers.ftp_to_s3 import FTPToS3Operator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
+
SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -71,13 +72,13 @@ delete_s3_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_gcs_to_s3.py b/providers/tests/system/amazon/aws/example_gcs_to_s3.py similarity index 95% rename from tests/system/providers/amazon/aws/example_gcs_to_s3.py rename to providers/tests/system/amazon/aws/example_gcs_to_s3.py index 198f9729feb3f..97ed8dfda3a98 100644 --- a/tests/system/providers/amazon/aws/example_gcs_to_s3.py +++ b/providers/tests/system/amazon/aws/example_gcs_to_s3.py @@ -32,7 +32,8 @@ GCSDeleteBucketOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder # Externally fetched variables: GCP_PROJECT_ID = "GCP_PROJECT_ID" @@ -114,13 +115,13 @@ def upload_gcs_file(bucket_name: str, object_name: str, user_project: str): delete_gcs_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_glacier_to_gcs.py b/providers/tests/system/amazon/aws/example_glacier_to_gcs.py similarity index 95% rename from tests/system/providers/amazon/aws/example_glacier_to_gcs.py rename to providers/tests/system/amazon/aws/example_glacier_to_gcs.py index e57029f33aec0..28c061ae057c9 100644 --- a/tests/system/providers/amazon/aws/example_glacier_to_gcs.py +++ b/providers/tests/system/amazon/aws/example_glacier_to_gcs.py @@ -30,7 +30,8 @@ from airflow.providers.amazon.aws.sensors.glacier import GlacierJobOperationSensor from airflow.providers.amazon.aws.transfers.glacier_to_gcs import GlacierToGCSOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -107,13 +108,13 @@ def delete_vault(vault_name): delete_vault(vault_name), ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = 
get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_glue.py b/providers/tests/system/amazon/aws/example_glue.py similarity index 97% rename from tests/system/providers/amazon/aws/example_glue.py rename to providers/tests/system/amazon/aws/example_glue.py index c16aaf8677c17..5314d26eff80c 100644 --- a/tests/system/providers/amazon/aws/example_glue.py +++ b/providers/tests/system/amazon/aws/example_glue.py @@ -35,7 +35,8 @@ from airflow.providers.amazon.aws.sensors.glue_catalog_partition import GlueCatalogPartitionSensor from airflow.providers.amazon.aws.sensors.glue_crawler import GlueCrawlerSensor from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs if TYPE_CHECKING: from botocore.client import BaseClient @@ -216,13 +217,13 @@ def glue_cleanup(crawler_name: str, job_name: str, db_name: str) -> None: log_cleanup, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_glue_data_quality.py b/providers/tests/system/amazon/aws/example_glue_data_quality.py similarity index 97% rename from tests/system/providers/amazon/aws/example_glue_data_quality.py rename to providers/tests/system/amazon/aws/example_glue_data_quality.py index e9b8f418e2f6e..4d05d6e22b436 100644 --- a/tests/system/providers/amazon/aws/example_glue_data_quality.py +++ b/providers/tests/system/amazon/aws/example_glue_data_quality.py @@ -34,7 +34,8 @@ ) from airflow.providers.amazon.aws.sensors.glue import GlueDataQualityRuleSetEvaluationRunSensor from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder ROLE_ARN_KEY = "ROLE_ARN" sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build() @@ -197,13 +198,13 @@ def delete_ruleset(ruleset_name): delete_s3_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_glue_data_quality_with_recommendation.py b/providers/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py similarity index 97% rename from tests/system/providers/amazon/aws/example_glue_data_quality_with_recommendation.py rename to providers/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py index 231750e97134b..fdb3cb8e63ad8 100644 --- 
a/tests/system/providers/amazon/aws/example_glue_data_quality_with_recommendation.py +++ b/providers/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py @@ -37,7 +37,8 @@ GlueDataQualityRuleSetEvaluationRunSensor, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder ROLE_ARN_KEY = "ROLE_ARN" sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build() @@ -196,13 +197,13 @@ def delete_ruleset(ruleset_name): delete_s3_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_glue_databrew.py b/providers/tests/system/amazon/aws/example_glue_databrew.py similarity index 95% rename from tests/system/providers/amazon/aws/example_glue_databrew.py rename to providers/tests/system/amazon/aws/example_glue_databrew.py index 251c7611b3be3..3218b465c290e 100644 --- a/tests/system/providers/amazon/aws/example_glue_databrew.py +++ b/providers/tests/system/amazon/aws/example_glue_databrew.py @@ -31,7 +31,8 @@ S3DeleteBucketOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder DAG_ID = "example_glue_databrew" @@ -154,14 +155,14 @@ def delete_job(job_name: str): delete_output_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_google_api_sheets_to_s3.py b/providers/tests/system/amazon/aws/example_google_api_sheets_to_s3.py similarity index 94% rename from tests/system/providers/amazon/aws/example_google_api_sheets_to_s3.py rename to providers/tests/system/amazon/aws/example_google_api_sheets_to_s3.py index b736308324f6e..7ea7ae9d072f7 100644 --- a/tests/system/providers/amazon/aws/example_google_api_sheets_to_s3.py +++ b/providers/tests/system/amazon/aws/example_google_api_sheets_to_s3.py @@ -29,7 +29,8 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -82,13 +83,13 @@ delete_s3_bucket, ) - from 
tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_google_api_youtube_to_s3.py b/providers/tests/system/amazon/aws/example_google_api_youtube_to_s3.py similarity index 97% rename from tests/system/providers/amazon/aws/example_google_api_youtube_to_s3.py rename to providers/tests/system/amazon/aws/example_google_api_youtube_to_s3.py index f6195f78018da..ae4f721567cec 100644 --- a/tests/system/providers/amazon/aws/example_google_api_youtube_to_s3.py +++ b/providers/tests/system/amazon/aws/example_google_api_youtube_to_s3.py @@ -61,7 +61,8 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_google_api_youtube_to_s3" @@ -196,13 +197,13 @@ def transform_video_ids(**kwargs): delete_s3_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_hive_to_dynamodb.py b/providers/tests/system/amazon/aws/example_hive_to_dynamodb.py similarity index 96% rename from tests/system/providers/amazon/aws/example_hive_to_dynamodb.py rename to providers/tests/system/amazon/aws/example_hive_to_dynamodb.py index 6b35b9dfca8e4..5582c13b855f5 100644 --- a/tests/system/providers/amazon/aws/example_hive_to_dynamodb.py +++ b/providers/tests/system/amazon/aws/example_hive_to_dynamodb.py @@ -32,7 +32,8 @@ from airflow.providers.amazon.aws.transfers.hive_to_dynamodb import HiveToDynamoDBOperator from airflow.utils import db from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder DAG_ID = "example_hive_to_dynamodb" @@ -149,13 +150,13 @@ def configure_hive_connection(connection_id, hostname): delete_dynamodb_table(dynamodb_table_name), ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG 
with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_http_to_s3.py b/providers/tests/system/amazon/aws/example_http_to_s3.py similarity index 94% rename from tests/system/providers/amazon/aws/example_http_to_s3.py rename to providers/tests/system/amazon/aws/example_http_to_s3.py index d6424f9802155..cfbc131ee5689 100644 --- a/tests/system/providers/amazon/aws/example_http_to_s3.py +++ b/providers/tests/system/amazon/aws/example_http_to_s3.py @@ -27,7 +27,8 @@ from airflow.providers.amazon.aws.transfers.http_to_s3 import HttpToS3Operator from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -118,13 +119,13 @@ def create_connection(conn_id_name: str): delete_s3_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_imap_attachment_to_s3.py b/providers/tests/system/amazon/aws/example_imap_attachment_to_s3.py similarity index 94% rename from tests/system/providers/amazon/aws/example_imap_attachment_to_s3.py rename to providers/tests/system/amazon/aws/example_imap_attachment_to_s3.py index 7b7fc1bb387d2..b43023115cf4d 100644 --- a/tests/system/providers/amazon/aws/example_imap_attachment_to_s3.py +++ b/providers/tests/system/amazon/aws/example_imap_attachment_to_s3.py @@ -28,7 +28,8 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.imap_attachment_to_s3 import ImapAttachmentToS3Operator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder DAG_ID = "example_imap_attachment_to_s3" @@ -89,13 +90,13 @@ delete_s3_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_kinesis_analytics.py b/providers/tests/system/amazon/aws/example_kinesis_analytics.py similarity index 97% rename from tests/system/providers/amazon/aws/example_kinesis_analytics.py rename to providers/tests/system/amazon/aws/example_kinesis_analytics.py index 007a35407437d..600710cc43b4b 100644 --- a/tests/system/providers/amazon/aws/example_kinesis_analytics.py +++ 
b/providers/tests/system/amazon/aws/example_kinesis_analytics.py @@ -43,7 +43,8 @@ ) from airflow.providers.amazon.aws.transfers.http_to_s3 import HttpToS3Operator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder ROLE_ARN_KEY = "ROLE_ARN" sys_test_context_task = SystemTestContextBuilder().add_variable(ROLE_ARN_KEY).build() @@ -262,13 +263,13 @@ def delete_kinesis_stream(stream: str, region: str): delete_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_lambda.py b/providers/tests/system/amazon/aws/example_lambda.py similarity index 95% rename from tests/system/providers/amazon/aws/example_lambda.py rename to providers/tests/system/amazon/aws/example_lambda.py index 4ba74a26d5f22..fd346a34589f0 100644 --- a/tests/system/providers/amazon/aws/example_lambda.py +++ b/providers/tests/system/amazon/aws/example_lambda.py @@ -32,7 +32,8 @@ ) from airflow.providers.amazon.aws.sensors.lambda_function import LambdaFunctionStateSensor from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs DAG_ID = "example_lambda" @@ -128,13 +129,13 @@ def delete_lambda(function_name: str): log_cleanup, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_local_to_s3.py b/providers/tests/system/amazon/aws/example_local_to_s3.py similarity index 93% rename from tests/system/providers/amazon/aws/example_local_to_s3.py rename to providers/tests/system/amazon/aws/example_local_to_s3.py index da979bcd37854..47344c429b958 100644 --- a/tests/system/providers/amazon/aws/example_local_to_s3.py +++ b/providers/tests/system/amazon/aws/example_local_to_s3.py @@ -25,7 +25,8 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.local_to_s3 import LocalFilesystemToS3Operator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -90,14 +91,14 @@ def delete_temp_file(): delete_temp_file(), ) - from tests.system.utils.watcher import 
watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_mongo_to_s3.py b/providers/tests/system/amazon/aws/example_mongo_to_s3.py similarity index 93% rename from tests/system/providers/amazon/aws/example_mongo_to_s3.py rename to providers/tests/system/amazon/aws/example_mongo_to_s3.py index 33bcd171c0d82..3e5bc318e5c17 100644 --- a/tests/system/providers/amazon/aws/example_mongo_to_s3.py +++ b/providers/tests/system/amazon/aws/example_mongo_to_s3.py @@ -22,7 +22,8 @@ from airflow.providers.amazon.aws.transfers.mongo_to_s3 import MongoToS3Operator from airflow.utils.dates import datetime from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder DAG_ID = "example_mongo_to_s3" @@ -82,13 +83,13 @@ delete_s3_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_neptune.py b/providers/tests/system/amazon/aws/example_neptune.py similarity index 93% rename from tests/system/providers/amazon/aws/example_neptune.py rename to providers/tests/system/amazon/aws/example_neptune.py index 81276c2dcd86a..4685840b42594 100644 --- a/tests/system/providers/amazon/aws/example_neptune.py +++ b/providers/tests/system/amazon/aws/example_neptune.py @@ -26,7 +26,8 @@ NeptuneStartDbClusterOperator, NeptuneStopDbClusterOperator, ) -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder DAG_ID = "example_neptune" @@ -76,13 +77,13 @@ def delete_cluster(cluster_id): # TEST TEARDOWN delete_cluster(cluster_id), ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_quicksight.py b/providers/tests/system/amazon/aws/example_quicksight.py similarity index 97% rename from tests/system/providers/amazon/aws/example_quicksight.py rename to providers/tests/system/amazon/aws/example_quicksight.py index ccf4746122f65..ec5cd38003148 100644 --- 
a/tests/system/providers/amazon/aws/example_quicksight.py +++ b/providers/tests/system/amazon/aws/example_quicksight.py @@ -33,7 +33,8 @@ ) from airflow.providers.amazon.aws.sensors.quicksight import QuickSightSensor from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder """ Prerequisites: @@ -217,13 +218,13 @@ def delete_ingestion(aws_account_id: str, dataset_name: str, ingestion_name: str delete_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_rds_event.py b/providers/tests/system/amazon/aws/example_rds_event.py similarity index 95% rename from tests/system/providers/amazon/aws/example_rds_event.py rename to providers/tests/system/amazon/aws/example_rds_event.py index 0d397f43066e4..79d70c9d394e2 100644 --- a/tests/system/providers/amazon/aws/example_rds_event.py +++ b/providers/tests/system/amazon/aws/example_rds_event.py @@ -31,7 +31,8 @@ RdsDeleteEventSubscriptionOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_rds_event" @@ -118,14 +119,14 @@ def delete_sns_topic(topic_arn) -> None: delete_sns_topic(sns_topic), ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_rds_export.py b/providers/tests/system/amazon/aws/example_rds_export.py similarity index 96% rename from tests/system/providers/amazon/aws/example_rds_export.py rename to providers/tests/system/amazon/aws/example_rds_export.py index 385a1871e7e27..34638f030f506 100644 --- a/tests/system/providers/amazon/aws/example_rds_export.py +++ b/providers/tests/system/amazon/aws/example_rds_export.py @@ -33,7 +33,8 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.sensors.rds import RdsExportTaskExistenceSensor, RdsSnapshotExistenceSensor from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_rds_export" @@ -177,13 +178,13 @@ def get_snapshot_arn(snapshot_name: str) -> str: delete_db_instance, ) - from tests.system.utils.watcher 
import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_rds_instance.py b/providers/tests/system/amazon/aws/example_rds_instance.py similarity index 95% rename from tests/system/providers/amazon/aws/example_rds_instance.py rename to providers/tests/system/amazon/aws/example_rds_instance.py index f917fb128486c..ce49f73c1f4ae 100644 --- a/tests/system/providers/amazon/aws/example_rds_instance.py +++ b/providers/tests/system/amazon/aws/example_rds_instance.py @@ -28,7 +28,8 @@ ) from airflow.providers.amazon.aws.sensors.rds import RdsDbSensor from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -110,14 +111,14 @@ delete_db_instance, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_rds_snapshot.py b/providers/tests/system/amazon/aws/example_rds_snapshot.py similarity index 96% rename from tests/system/providers/amazon/aws/example_rds_snapshot.py rename to providers/tests/system/amazon/aws/example_rds_snapshot.py index 5585f339feec2..c58c1db54d539 100644 --- a/tests/system/providers/amazon/aws/example_rds_snapshot.py +++ b/providers/tests/system/amazon/aws/example_rds_snapshot.py @@ -29,7 +29,8 @@ ) from airflow.providers.amazon.aws.sensors.rds import RdsSnapshotExistenceSensor from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_rds_snapshot" @@ -137,14 +138,14 @@ delete_db_instance, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_redshift.py b/providers/tests/system/amazon/aws/example_redshift.py similarity index 97% rename from tests/system/providers/amazon/aws/example_redshift.py rename to providers/tests/system/amazon/aws/example_redshift.py 
index 67b822d41ef55..986bce5a07b52 100644 --- a/tests/system/providers/amazon/aws/example_redshift.py +++ b/providers/tests/system/amazon/aws/example_redshift.py @@ -33,7 +33,8 @@ from airflow.providers.amazon.aws.operators.redshift_data import RedshiftDataOperator from airflow.providers.amazon.aws.sensors.redshift_cluster import RedshiftClusterSensor from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_redshift" @@ -247,13 +248,13 @@ delete_cluster_snapshot, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_redshift_s3_transfers.py b/providers/tests/system/amazon/aws/example_redshift_s3_transfers.py similarity index 98% rename from tests/system/providers/amazon/aws/example_redshift_s3_transfers.py rename to providers/tests/system/amazon/aws/example_redshift_s3_transfers.py index 9fb989ec53697..2a553eae8cb60 100644 --- a/tests/system/providers/amazon/aws/example_redshift_s3_transfers.py +++ b/providers/tests/system/amazon/aws/example_redshift_s3_transfers.py @@ -35,7 +35,8 @@ from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_redshift_to_s3" @@ -317,13 +318,13 @@ def _insert_data(table_name: str) -> str: delete_cluster, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_s3.py b/providers/tests/system/amazon/aws/example_s3.py similarity index 98% rename from tests/system/providers/amazon/aws/example_s3.py rename to providers/tests/system/amazon/aws/example_s3.py index 06f60b4ac99cf..d016076022874 100644 --- a/tests/system/providers/amazon/aws/example_s3.py +++ b/providers/tests/system/amazon/aws/example_s3.py @@ -36,7 +36,8 @@ ) from airflow.providers.amazon.aws.sensors.s3 import S3KeySensor, S3KeysUnchangedSensor from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_s3" @@ -322,13 +323,13 @@ def 
check_fn(files: list, **kwargs) -> bool: delete_bucket, delete_bucket_2, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_s3_to_dynamodb.py b/providers/tests/system/amazon/aws/example_s3_to_dynamodb.py similarity index 96% rename from tests/system/providers/amazon/aws/example_s3_to_dynamodb.py rename to providers/tests/system/amazon/aws/example_s3_to_dynamodb.py index b415ffad7bdc1..efd762cfd93c1 100644 --- a/tests/system/providers/amazon/aws/example_s3_to_dynamodb.py +++ b/providers/tests/system/amazon/aws/example_s3_to_dynamodb.py @@ -31,7 +31,8 @@ ) from airflow.providers.amazon.aws.transfers.s3_to_dynamodb import S3ToDynamoDBOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder log = logging.getLogger(__name__) @@ -180,13 +181,13 @@ def delete_dynamodb_table(table_name: str): delete_new_table, delete_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_s3_to_ftp.py b/providers/tests/system/amazon/aws/example_s3_to_ftp.py similarity index 92% rename from tests/system/providers/amazon/aws/example_s3_to_ftp.py rename to providers/tests/system/amazon/aws/example_s3_to_ftp.py index bf909ddc8a219..0ffb4722ba867 100644 --- a/tests/system/providers/amazon/aws/example_s3_to_ftp.py +++ b/providers/tests/system/amazon/aws/example_s3_to_ftp.py @@ -23,7 +23,8 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.s3_to_ftp import S3ToFTPOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -70,13 +71,13 @@ delete_s3_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git 
a/tests/system/providers/amazon/aws/example_s3_to_sftp.py b/providers/tests/system/amazon/aws/example_s3_to_sftp.py similarity index 92% rename from tests/system/providers/amazon/aws/example_s3_to_sftp.py rename to providers/tests/system/amazon/aws/example_s3_to_sftp.py index 78825d7151468..5270d3ec7d4b9 100644 --- a/tests/system/providers/amazon/aws/example_s3_to_sftp.py +++ b/providers/tests/system/amazon/aws/example_s3_to_sftp.py @@ -23,7 +23,8 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.s3_to_sftp import S3ToSFTPOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -70,13 +71,13 @@ delete_s3_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_s3_to_sql.py b/providers/tests/system/amazon/aws/example_s3_to_sql.py similarity index 97% rename from tests/system/providers/amazon/aws/example_s3_to_sql.py rename to providers/tests/system/amazon/aws/example_s3_to_sql.py index 5e5e2ba010e89..e7e8a5e9543cf 100644 --- a/tests/system/providers/amazon/aws/example_s3_to_sql.py +++ b/providers/tests/system/amazon/aws/example_s3_to_sql.py @@ -39,8 +39,9 @@ from airflow.providers.amazon.aws.transfers.s3_to_sql import S3ToSqlOperator from airflow.providers.common.sql.operators.sql import SQLTableCheckOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder -from tests.system.utils.watcher import watcher + +from dev.tests_common.test_utils.watcher import watcher +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder # Externally fetched variables: SECURITY_GROUP_KEY = "SECURITY_GROUP" @@ -253,7 +254,7 @@ def parse_csv_to_generator(filepath): list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_sagemaker.py b/providers/tests/system/amazon/aws/example_sagemaker.py similarity index 99% rename from tests/system/providers/amazon/aws/example_sagemaker.py rename to providers/tests/system/amazon/aws/example_sagemaker.py index 15b756494eac3..acb05400b951e 100644 --- a/tests/system/providers/amazon/aws/example_sagemaker.py +++ b/providers/tests/system/amazon/aws/example_sagemaker.py @@ -52,7 +52,8 @@ SageMakerTuningSensor, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, 
SystemTestContextBuilder, prune_logs logger = logging.getLogger(__name__) @@ -645,13 +646,13 @@ def delete_docker_image(image_name): log_cleanup, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_sagemaker_endpoint.py b/providers/tests/system/amazon/aws/example_sagemaker_endpoint.py similarity index 97% rename from tests/system/providers/amazon/aws/example_sagemaker_endpoint.py rename to providers/tests/system/amazon/aws/example_sagemaker_endpoint.py index 226b690cbf409..fecaa92264b6e 100644 --- a/tests/system/providers/amazon/aws/example_sagemaker_endpoint.py +++ b/providers/tests/system/amazon/aws/example_sagemaker_endpoint.py @@ -38,7 +38,8 @@ ) from airflow.providers.amazon.aws.sensors.sagemaker import SageMakerEndpointSensor from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder, prune_logs DAG_ID = "example_sagemaker_endpoint" @@ -58,7 +59,7 @@ } # For an example of how to obtain the following train and test data, please see -# https://github.com/apache/airflow/blob/main/tests/system/providers/amazon/aws/example_sagemaker.py +# https://github.com/apache/airflow/blob/main/providers/tests/system/amazon/aws/example_sagemaker.py TRAIN_DATA = "0,4.9,2.5,4.5,1.7\n1,7.0,3.2,4.7,1.4\n0,7.3,2.9,6.3,1.8\n2,5.1,3.5,1.4,0.2\n" SAMPLE_TEST_DATA = "6.4,3.2,4.5,1.5" @@ -290,13 +291,13 @@ def set_up(env_id, role_arn, ti=None): archive_logs(f"/aws/sagemaker/Endpoints/{test_setup['endpoint_name']}"), ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_sagemaker_notebook.py b/providers/tests/system/amazon/aws/example_sagemaker_notebook.py similarity index 94% rename from tests/system/providers/amazon/aws/example_sagemaker_notebook.py rename to providers/tests/system/amazon/aws/example_sagemaker_notebook.py index b61bfa822bb04..76d35dc2b1479 100644 --- a/tests/system/providers/amazon/aws/example_sagemaker_notebook.py +++ b/providers/tests/system/amazon/aws/example_sagemaker_notebook.py @@ -26,7 +26,8 @@ SageMakerStartNoteBookOperator, SageMakerStopNotebookOperator, ) -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_sagemaker_notebook" @@ -96,13 +97,13 @@ delete_instance, ) - from tests.system.utils.watcher import watcher + from 
dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_sagemaker_pipeline.py b/providers/tests/system/amazon/aws/example_sagemaker_pipeline.py similarity index 97% rename from tests/system/providers/amazon/aws/example_sagemaker_pipeline.py rename to providers/tests/system/amazon/aws/example_sagemaker_pipeline.py index 3f32511702401..614fb5c13cc9e 100644 --- a/tests/system/providers/amazon/aws/example_sagemaker_pipeline.py +++ b/providers/tests/system/amazon/aws/example_sagemaker_pipeline.py @@ -31,8 +31,9 @@ SageMakerPipelineSensor, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.example_sagemaker import delete_experiments -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + +from providers.tests.system.amazon.aws.example_sagemaker import delete_experiments +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_sagemaker_pipeline" @@ -116,13 +117,13 @@ def delete_pipeline(name: str): delete_experiments([pipeline_name]), ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_salesforce_to_s3.py b/providers/tests/system/amazon/aws/example_salesforce_to_s3.py similarity index 93% rename from tests/system/providers/amazon/aws/example_salesforce_to_s3.py rename to providers/tests/system/amazon/aws/example_salesforce_to_s3.py index 112be85c5ad29..7ee15f7030beb 100644 --- a/tests/system/providers/amazon/aws/example_salesforce_to_s3.py +++ b/providers/tests/system/amazon/aws/example_salesforce_to_s3.py @@ -28,7 +28,8 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.salesforce_to_s3 import SalesforceToS3Operator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -77,13 +78,13 @@ delete_s3_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) 
test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_sftp_to_s3.py b/providers/tests/system/amazon/aws/example_sftp_to_s3.py similarity index 92% rename from tests/system/providers/amazon/aws/example_sftp_to_s3.py rename to providers/tests/system/amazon/aws/example_sftp_to_s3.py index c5cae1ebfa68c..85dac988cb82e 100644 --- a/tests/system/providers/amazon/aws/example_sftp_to_s3.py +++ b/providers/tests/system/amazon/aws/example_sftp_to_s3.py @@ -23,7 +23,8 @@ from airflow.providers.amazon.aws.operators.s3 import S3CreateBucketOperator, S3DeleteBucketOperator from airflow.providers.amazon.aws.transfers.sftp_to_s3 import SFTPToS3Operator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -70,13 +71,13 @@ delete_s3_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_sns.py b/providers/tests/system/amazon/aws/example_sns.py similarity index 92% rename from tests/system/providers/amazon/aws/example_sns.py rename to providers/tests/system/amazon/aws/example_sns.py index 4cfb2b89153c8..d00227536da0b 100644 --- a/tests/system/providers/amazon/aws/example_sns.py +++ b/providers/tests/system/amazon/aws/example_sns.py @@ -25,7 +25,8 @@ from airflow.models.dag import DAG from airflow.providers.amazon.aws.operators.sns import SnsPublishOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -74,14 +75,14 @@ def delete_topic(topic_arn) -> None: delete_topic(create_sns_topic), ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_sql_to_s3.py b/providers/tests/system/amazon/aws/example_sql_to_s3.py similarity index 96% rename from tests/system/providers/amazon/aws/example_sql_to_s3.py rename to providers/tests/system/amazon/aws/example_sql_to_s3.py index d52ca3630b9d5..dd333073a0431 100644 --- a/tests/system/providers/amazon/aws/example_sql_to_s3.py +++ b/providers/tests/system/amazon/aws/example_sql_to_s3.py @@ -35,7 +35,8 @@ from airflow.providers.amazon.aws.sensors.redshift_cluster import RedshiftClusterSensor from airflow.providers.amazon.aws.transfers.sql_to_s3 import SqlToS3Operator from 
airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_sql_to_s3" @@ -198,14 +199,14 @@ def create_connection(conn_id_name: str, cluster_id: str): delete_cluster, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_sqs.py b/providers/tests/system/amazon/aws/example_sqs.py similarity index 94% rename from tests/system/providers/amazon/aws/example_sqs.py rename to providers/tests/system/amazon/aws/example_sqs.py index 0a14d5c446408..2aba6a580c4d7 100644 --- a/tests/system/providers/amazon/aws/example_sqs.py +++ b/providers/tests/system/amazon/aws/example_sqs.py @@ -25,7 +25,8 @@ from airflow.providers.amazon.aws.operators.sqs import SqsPublishOperator from airflow.providers.amazon.aws.sensors.sqs import SqsSensor from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -99,14 +100,14 @@ def delete_queue(queue_url): delete_queue(sqs_queue), ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/example_step_functions.py b/providers/tests/system/amazon/aws/example_step_functions.py similarity index 95% rename from tests/system/providers/amazon/aws/example_step_functions.py rename to providers/tests/system/amazon/aws/example_step_functions.py index 1c0e90979d694..beeb12528c7ac 100644 --- a/tests/system/providers/amazon/aws/example_step_functions.py +++ b/providers/tests/system/amazon/aws/example_step_functions.py @@ -28,7 +28,8 @@ StepFunctionStartExecutionOperator, ) from airflow.providers.amazon.aws.sensors.step_function import StepFunctionExecutionSensor -from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder DAG_ID = "example_step_functions" @@ -111,13 +112,13 @@ def delete_state_machine(state_machine_arn): delete_state_machine(state_machine_arn), ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import 
get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/trino/assets/__init__.py b/providers/tests/system/amazon/aws/tests/__init__.py similarity index 100% rename from tests/providers/trino/assets/__init__.py rename to providers/tests/system/amazon/aws/tests/__init__.py diff --git a/tests/system/providers/amazon/aws/tests/test_aws_auth_manager.py b/providers/tests/system/amazon/aws/tests/test_aws_auth_manager.py similarity index 91% rename from tests/system/providers/amazon/aws/tests/test_aws_auth_manager.py rename to providers/tests/system/amazon/aws/tests/test_aws_auth_manager.py index dac7398a1ba8f..774aec21b2b6b 100644 --- a/tests/system/providers/amazon/aws/tests/test_aws_auth_manager.py +++ b/providers/tests/system/amazon/aws/tests/test_aws_auth_manager.py @@ -23,9 +23,10 @@ import pytest from airflow.www import app as application -from tests.system.providers.amazon.aws.utils import set_env_id -from tests.test_utils.config import conf_vars -from tests.test_utils.www import check_content_in_response + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.www import check_content_in_response +from providers.tests.system.amazon.aws.utils import set_env_id SAML_METADATA_URL = "/saml/metadata" SAML_METADATA_PARSED = { @@ -124,11 +125,14 @@ def base_app(region_name, avp_policy_store_id): ("aws_auth_manager", "avp_policy_store_id"): avp_policy_store_id, } ): - with patch( - "airflow.providers.amazon.aws.auth_manager.views.auth.OneLogin_Saml2_IdPMetadataParser" - ) as mock_parser, patch( - "airflow.providers.amazon.aws.auth_manager.views.auth.AwsAuthManagerAuthenticationViews._init_saml_auth" - ) as mock_init_saml_auth: + with ( + patch( + "airflow.providers.amazon.aws.auth_manager.views.auth.OneLogin_Saml2_IdPMetadataParser" + ) as mock_parser, + patch( + "airflow.providers.amazon.aws.auth_manager.views.auth.AwsAuthManagerAuthenticationViews._init_saml_auth" + ) as mock_init_saml_auth, + ): mock_parser.parse_remote.return_value = SAML_METADATA_PARSED yield mock_init_saml_auth diff --git a/tests/system/providers/amazon/aws/utils/__init__.py b/providers/tests/system/amazon/aws/utils/__init__.py similarity index 99% rename from tests/system/providers/amazon/aws/utils/__init__.py rename to providers/tests/system/amazon/aws/utils/__init__.py index 8b4114fc90ad0..1b6b5cc47c1dc 100644 --- a/tests/system/providers/amazon/aws/utils/__init__.py +++ b/providers/tests/system/amazon/aws/utils/__init__.py @@ -48,7 +48,7 @@ INVALID_ENV_ID_MSG: str = ( "To maximize compatibility, the SYSTEM_TESTS_ENV_ID must be an alphanumeric string " - "which starts with a letter. Please see `tests/system/providers/amazon/README.md`." + "which starts with a letter. Please see `providers/tests/system/amazon/README.md`." 
) LOWERCASE_ENV_ID_MSG: str = ( "The provided Environment ID contains uppercase letters and " diff --git a/tests/system/providers/amazon/aws/utils/ec2.py b/providers/tests/system/amazon/aws/utils/ec2.py similarity index 100% rename from tests/system/providers/amazon/aws/utils/ec2.py rename to providers/tests/system/amazon/aws/utils/ec2.py diff --git a/tests/system/providers/amazon/aws/utils/k8s.py b/providers/tests/system/amazon/aws/utils/k8s.py similarity index 100% rename from tests/system/providers/amazon/aws/utils/k8s.py rename to providers/tests/system/amazon/aws/utils/k8s.py diff --git a/tests/providers/trino/operators/__init__.py b/providers/tests/system/apache/__init__.py similarity index 100% rename from tests/providers/trino/operators/__init__.py rename to providers/tests/system/apache/__init__.py diff --git a/tests/providers/tableau/hooks/__init__.py b/providers/tests/system/apache/beam/__init__.py similarity index 100% rename from tests/providers/tableau/hooks/__init__.py rename to providers/tests/system/apache/beam/__init__.py diff --git a/tests/system/providers/apache/beam/example_beam.py b/providers/tests/system/apache/beam/example_beam.py similarity index 95% rename from tests/system/providers/apache/beam/example_beam.py rename to providers/tests/system/apache/beam/example_beam.py index b0c62bbc00baa..4166caa3477a4 100644 --- a/tests/system/providers/apache/beam/example_beam.py +++ b/providers/tests/system/apache/beam/example_beam.py @@ -24,7 +24,8 @@ from airflow import models from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator -from tests.system.providers.apache.beam.utils import ( + +from providers.tests.system.apache.beam.utils import ( GCS_INPUT, GCS_JAR_DIRECT_RUNNER_BUCKET_NAME, GCS_JAR_DIRECT_RUNNER_OBJECT_NAME, @@ -60,7 +61,7 @@ # [END howto_operator_start_java_direct_runner_pipeline] -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/beam/example_beam_java_flink.py b/providers/tests/system/apache/beam/example_beam_java_flink.py similarity index 94% rename from tests/system/providers/apache/beam/example_beam_java_flink.py rename to providers/tests/system/apache/beam/example_beam_java_flink.py index 42cfad2bdc8c6..a68637d8ef8d9 100644 --- a/tests/system/providers/apache/beam/example_beam_java_flink.py +++ b/providers/tests/system/apache/beam/example_beam_java_flink.py @@ -24,7 +24,8 @@ from airflow import models from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator -from tests.system.providers.apache.beam.utils import ( + +from providers.tests.system.apache.beam.utils import ( GCS_INPUT, GCS_JAR_FLINK_RUNNER_BUCKET_NAME, GCS_JAR_FLINK_RUNNER_OBJECT_NAME, @@ -59,7 +60,7 @@ jar_to_local_flink_runner >> start_java_pipeline_flink_runner -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/beam/example_beam_java_spark.py b/providers/tests/system/apache/beam/example_beam_java_spark.py 
similarity index 94% rename from tests/system/providers/apache/beam/example_beam_java_spark.py rename to providers/tests/system/apache/beam/example_beam_java_spark.py index 29259dd63a253..eb3fea6d16dab 100644 --- a/tests/system/providers/apache/beam/example_beam_java_spark.py +++ b/providers/tests/system/apache/beam/example_beam_java_spark.py @@ -24,7 +24,8 @@ from airflow import models from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator -from tests.system.providers.apache.beam.utils import ( + +from providers.tests.system.apache.beam.utils import ( GCS_INPUT, GCS_JAR_SPARK_RUNNER_BUCKET_NAME, GCS_JAR_SPARK_RUNNER_OBJECT_NAME, @@ -59,7 +60,7 @@ jar_to_local_spark_runner >> start_java_pipeline_spark_runner -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/beam/example_go.py b/providers/tests/system/apache/beam/example_go.py similarity index 96% rename from tests/system/providers/apache/beam/example_go.py rename to providers/tests/system/apache/beam/example_go.py index f5fc70ff1e917..792eee088bf6d 100644 --- a/tests/system/providers/apache/beam/example_go.py +++ b/providers/tests/system/apache/beam/example_go.py @@ -24,7 +24,8 @@ from airflow import models from airflow.providers.apache.beam.operators.beam import BeamRunGoPipelineOperator from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration -from tests.system.providers.apache.beam.utils import ( + +from providers.tests.system.apache.beam.utils import ( DEFAULT_ARGS, GCP_PROJECT_ID, GCS_GO, @@ -102,7 +103,7 @@ ) -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/beam/example_go_dataflow.py b/providers/tests/system/apache/beam/example_go_dataflow.py similarity index 95% rename from tests/system/providers/apache/beam/example_go_dataflow.py rename to providers/tests/system/apache/beam/example_go_dataflow.py index 4cc81d9263042..35a63cb56e1e7 100644 --- a/tests/system/providers/apache/beam/example_go_dataflow.py +++ b/providers/tests/system/apache/beam/example_go_dataflow.py @@ -26,7 +26,8 @@ from airflow.providers.google.cloud.hooks.dataflow import DataflowJobStatus from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration from airflow.providers.google.cloud.sensors.dataflow import DataflowJobStatusSensor -from tests.system.providers.apache.beam.utils import ( + +from providers.tests.system.apache.beam.utils import ( DEFAULT_ARGS, GCP_PROJECT_ID, GCS_GO_DATAFLOW_ASYNC, @@ -75,7 +76,7 @@ # [END howto_operator_start_go_dataflow_runner_pipeline_async_gcs_file] -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/beam/example_java_dataflow.py b/providers/tests/system/apache/beam/example_java_dataflow.py similarity index 95% rename from tests/system/providers/apache/beam/example_java_dataflow.py rename to 
providers/tests/system/apache/beam/example_java_dataflow.py index 0a941eb32a5a3..334142dfd315c 100644 --- a/tests/system/providers/apache/beam/example_java_dataflow.py +++ b/providers/tests/system/apache/beam/example_java_dataflow.py @@ -24,7 +24,8 @@ from airflow import models from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator -from tests.system.providers.apache.beam.utils import ( + +from providers.tests.system.apache.beam.utils import ( GCS_JAR_DATAFLOW_RUNNER_BUCKET_NAME, GCS_JAR_DATAFLOW_RUNNER_OBJECT_NAME, GCS_OUTPUT, @@ -65,7 +66,7 @@ # [END howto_operator_start_java_dataflow_runner_pipeline] -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/beam/example_python.py b/providers/tests/system/apache/beam/example_python.py similarity index 97% rename from tests/system/providers/apache/beam/example_python.py rename to providers/tests/system/apache/beam/example_python.py index 12907df08f5ac..d685cb33d2dad 100644 --- a/tests/system/providers/apache/beam/example_python.py +++ b/providers/tests/system/apache/beam/example_python.py @@ -24,7 +24,8 @@ from airflow import models from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration -from tests.system.providers.apache.beam.utils import ( + +from providers.tests.system.apache.beam.utils import ( DEFAULT_ARGS, GCP_PROJECT_ID, GCS_OUTPUT, @@ -119,7 +120,7 @@ ) -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/beam/example_python_async.py b/providers/tests/system/apache/beam/example_python_async.py similarity index 97% rename from tests/system/providers/apache/beam/example_python_async.py rename to providers/tests/system/apache/beam/example_python_async.py index ab2e9a8063ea4..8465278b6bf76 100644 --- a/tests/system/providers/apache/beam/example_python_async.py +++ b/providers/tests/system/apache/beam/example_python_async.py @@ -24,7 +24,8 @@ from airflow import models from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration -from tests.system.providers.apache.beam.utils import ( + +from providers.tests.system.apache.beam.utils import ( DEFAULT_ARGS, GCP_PROJECT_ID, GCS_OUTPUT, @@ -128,7 +129,7 @@ ) -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/beam/example_python_dataflow.py b/providers/tests/system/apache/beam/example_python_dataflow.py similarity index 96% rename from tests/system/providers/apache/beam/example_python_dataflow.py rename to providers/tests/system/apache/beam/example_python_dataflow.py index 9b849e3900d9c..36d6b3b8562e3 100644 --- a/tests/system/providers/apache/beam/example_python_dataflow.py +++ 
b/providers/tests/system/apache/beam/example_python_dataflow.py @@ -26,7 +26,8 @@ from airflow.providers.google.cloud.hooks.dataflow import DataflowJobStatus from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration from airflow.providers.google.cloud.sensors.dataflow import DataflowJobStatusSensor -from tests.system.providers.apache.beam.utils import ( + +from providers.tests.system.apache.beam.utils import ( DEFAULT_ARGS, GCP_PROJECT_ID, GCS_OUTPUT, @@ -78,7 +79,7 @@ # [END howto_operator_start_python_dataflow_runner_pipeline_async_gcs_file] -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/beam/utils.py b/providers/tests/system/apache/beam/utils.py similarity index 100% rename from tests/system/providers/apache/beam/utils.py rename to providers/tests/system/apache/beam/utils.py diff --git a/tests/providers/weaviate/__init__.py b/providers/tests/system/apache/cassandra/__init__.py similarity index 100% rename from tests/providers/weaviate/__init__.py rename to providers/tests/system/apache/cassandra/__init__.py diff --git a/tests/system/providers/apache/cassandra/example_cassandra_dag.py b/providers/tests/system/apache/cassandra/example_cassandra_dag.py similarity index 96% rename from tests/system/providers/apache/cassandra/example_cassandra_dag.py rename to providers/tests/system/apache/cassandra/example_cassandra_dag.py index 79e9918b446bf..0b25fa2975dbd 100644 --- a/tests/system/providers/apache/cassandra/example_cassandra_dag.py +++ b/providers/tests/system/apache/cassandra/example_cassandra_dag.py @@ -50,7 +50,7 @@ record_sensor = CassandraRecordSensor(task_id="cassandra_record_sensor", keys={"p1": "v1", "p2": "v2"}) # [END howto_operator_cassandra_sensors] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/weaviate/hooks/__init__.py b/providers/tests/system/apache/drill/__init__.py similarity index 100% rename from tests/providers/weaviate/hooks/__init__.py rename to providers/tests/system/apache/drill/__init__.py diff --git a/tests/system/providers/apache/drill/example_drill_dag.py b/providers/tests/system/apache/drill/example_drill_dag.py similarity index 95% rename from tests/system/providers/apache/drill/example_drill_dag.py rename to providers/tests/system/apache/drill/example_drill_dag.py index 62f332f318a8c..fdaf376d5a162 100644 --- a/tests/system/providers/apache/drill/example_drill_dag.py +++ b/providers/tests/system/apache/drill/example_drill_dag.py @@ -47,7 +47,7 @@ ) # [END howto_operator_drill] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/weaviate/operators/__init__.py b/providers/tests/system/apache/druid/__init__.py similarity index 100% rename from tests/providers/weaviate/operators/__init__.py rename to providers/tests/system/apache/druid/__init__.py diff --git a/tests/system/providers/apache/druid/example_druid_dag.py 
b/providers/tests/system/apache/druid/example_druid_dag.py similarity index 96% rename from tests/system/providers/apache/druid/example_druid_dag.py rename to providers/tests/system/apache/druid/example_druid_dag.py index 1426a12456bae..5e1bfc446bb09 100644 --- a/tests/system/providers/apache/druid/example_druid_dag.py +++ b/providers/tests/system/apache/druid/example_druid_dag.py @@ -55,7 +55,7 @@ """ # [END howto_operator_druid_submit] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/telegram/__init__.py b/providers/tests/system/apache/hive/__init__.py similarity index 100% rename from tests/providers/telegram/__init__.py rename to providers/tests/system/apache/hive/__init__.py diff --git a/tests/system/providers/apache/hive/example_twitter_README.md b/providers/tests/system/apache/hive/example_twitter_README.md similarity index 100% rename from tests/system/providers/apache/hive/example_twitter_README.md rename to providers/tests/system/apache/hive/example_twitter_README.md diff --git a/tests/system/providers/apache/hive/example_twitter_dag.py b/providers/tests/system/apache/hive/example_twitter_dag.py similarity index 97% rename from tests/system/providers/apache/hive/example_twitter_dag.py rename to providers/tests/system/apache/hive/example_twitter_dag.py index 4ceb119ba551c..129a8ebf9f77f 100644 --- a/tests/system/providers/apache/hive/example_twitter_dag.py +++ b/providers/tests/system/apache/hive/example_twitter_dag.py @@ -154,13 +154,13 @@ def transfer_to_db(): analyze >> load_to_hdfs >> load_to_hive >> hive_to_mysql - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/telegram/hooks/__init__.py b/providers/tests/system/apache/iceberg/__init__.py similarity index 100% rename from tests/providers/telegram/hooks/__init__.py rename to providers/tests/system/apache/iceberg/__init__.py diff --git a/tests/system/providers/apache/iceberg/example_iceberg.py b/providers/tests/system/apache/iceberg/example_iceberg.py similarity index 95% rename from tests/system/providers/apache/iceberg/example_iceberg.py rename to providers/tests/system/apache/iceberg/example_iceberg.py index 41e751624b5c0..a4708805b6788 100644 --- a/tests/system/providers/apache/iceberg/example_iceberg.py +++ b/providers/tests/system/apache/iceberg/example_iceberg.py @@ -49,7 +49,7 @@ ) -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/yandex/__init__.py b/providers/tests/system/apache/kafka/__init__.py similarity index 100% rename from tests/providers/yandex/__init__.py rename to providers/tests/system/apache/kafka/__init__.py diff --git 
a/tests/system/providers/apache/kafka/example_dag_event_listener.py b/providers/tests/system/apache/kafka/example_dag_event_listener.py similarity index 97% rename from tests/system/providers/apache/kafka/example_dag_event_listener.py rename to providers/tests/system/apache/kafka/example_dag_event_listener.py index 24d8177ce8cab..eb6866735e830 100644 --- a/tests/system/providers/apache/kafka/example_dag_event_listener.py +++ b/providers/tests/system/apache/kafka/example_dag_event_listener.py @@ -120,7 +120,7 @@ def wait_for_event(message, **context): t0 >> t1 -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/kafka/example_dag_hello_kafka.py b/providers/tests/system/apache/kafka/example_dag_hello_kafka.py similarity index 98% rename from tests/system/providers/apache/kafka/example_dag_hello_kafka.py rename to providers/tests/system/apache/kafka/example_dag_hello_kafka.py index fc7078c3dc5a3..5e70d7324a1a0 100644 --- a/tests/system/providers/apache/kafka/example_dag_hello_kafka.py +++ b/providers/tests/system/apache/kafka/example_dag_hello_kafka.py @@ -240,7 +240,7 @@ def hello_kafka(): t0 >> t3 >> [t4, t4b] >> t5 >> t6 -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/yandex/hooks/__init__.py b/providers/tests/system/apache/kylin/__init__.py similarity index 100% rename from tests/providers/yandex/hooks/__init__.py rename to providers/tests/system/apache/kylin/__init__.py diff --git a/tests/system/providers/apache/kylin/example_kylin_dag.py b/providers/tests/system/apache/kylin/example_kylin_dag.py similarity index 96% rename from tests/system/providers/apache/kylin/example_kylin_dag.py rename to providers/tests/system/apache/kylin/example_kylin_dag.py index c851ad7c5e3df..1bfec2cd1a7dd 100644 --- a/tests/system/providers/apache/kylin/example_kylin_dag.py +++ b/providers/tests/system/apache/kylin/example_kylin_dag.py @@ -109,13 +109,13 @@ def gen_build_time(): # gen_build_time >> refresh_task1 # gen_build_time >> merge_task # gen_build_time >> build_task3 - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/yandex/links/__init__.py b/providers/tests/system/apache/livy/__init__.py similarity index 100% rename from tests/providers/yandex/links/__init__.py rename to providers/tests/system/apache/livy/__init__.py diff --git a/tests/system/providers/apache/livy/example_livy.py b/providers/tests/system/apache/livy/example_livy.py similarity index 95% rename from tests/system/providers/apache/livy/example_livy.py rename to providers/tests/system/apache/livy/example_livy.py index 507bbaefede43..09f8f7e244e02 100644 --- 
a/tests/system/providers/apache/livy/example_livy.py +++ b/providers/tests/system/apache/livy/example_livy.py @@ -73,13 +73,13 @@ livy_java_task_deferrable >> livy_python_task_deferrable # [END create_livy_deferrable] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/yandex/operators/__init__.py b/providers/tests/system/apache/pig/__init__.py similarity index 100% rename from tests/providers/yandex/operators/__init__.py rename to providers/tests/system/apache/pig/__init__.py diff --git a/tests/system/providers/apache/pig/example_pig.py b/providers/tests/system/apache/pig/example_pig.py similarity index 95% rename from tests/system/providers/apache/pig/example_pig.py rename to providers/tests/system/apache/pig/example_pig.py index 938987f32e78f..bb556a85111d1 100644 --- a/tests/system/providers/apache/pig/example_pig.py +++ b/providers/tests/system/apache/pig/example_pig.py @@ -44,7 +44,7 @@ # [END create_pig] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/yandex/secrets/__init__.py b/providers/tests/system/apache/pinot/__init__.py similarity index 100% rename from tests/providers/yandex/secrets/__init__.py rename to providers/tests/system/apache/pinot/__init__.py diff --git a/tests/system/providers/apache/pinot/example_pinot_dag.py b/providers/tests/system/apache/pinot/example_pinot_dag.py similarity index 96% rename from tests/system/providers/apache/pinot/example_pinot_dag.py rename to providers/tests/system/apache/pinot/example_pinot_dag.py index eecd6a4c2abed..d95022f366910 100644 --- a/tests/system/providers/apache/pinot/example_pinot_dag.py +++ b/providers/tests/system/apache/pinot/example_pinot_dag.py @@ -52,7 +52,7 @@ def pinot_dbi_api(): pinot_admin() pinot_dbi_api() -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/yandex/utils/__init__.py b/providers/tests/system/apache/spark/__init__.py similarity index 100% rename from tests/providers/yandex/utils/__init__.py rename to providers/tests/system/apache/spark/__init__.py diff --git a/tests/system/providers/apache/spark/example_pyspark.py b/providers/tests/system/apache/spark/example_pyspark.py similarity index 96% rename from tests/system/providers/apache/spark/example_pyspark.py rename to providers/tests/system/apache/spark/example_pyspark.py index cc18911a38c72..0ca14a76f48f9 100644 --- a/tests/system/providers/apache/spark/example_pyspark.py +++ b/providers/tests/system/apache/spark/example_pyspark.py @@ -70,7 +70,7 @@ def print_df(df: pd.DataFrame): dag = example_pyspark() # type: ignore -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import 
get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/spark/example_spark_dag.py b/providers/tests/system/apache/spark/example_spark_dag.py similarity index 97% rename from tests/system/providers/apache/spark/example_spark_dag.py rename to providers/tests/system/apache/spark/example_spark_dag.py index 00bb415585c0b..b68fc2cf761a9 100644 --- a/tests/system/providers/apache/spark/example_spark_dag.py +++ b/providers/tests/system/apache/spark/example_spark_dag.py @@ -75,7 +75,7 @@ ) # [END howto_operator_spark_sql] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/ydb/__init__.py b/providers/tests/system/asana/__init__.py similarity index 100% rename from tests/providers/ydb/__init__.py rename to providers/tests/system/asana/__init__.py diff --git a/tests/system/providers/asana/example_asana.py b/providers/tests/system/asana/example_asana.py similarity index 97% rename from tests/system/providers/asana/example_asana.py rename to providers/tests/system/asana/example_asana.py index 79a3636a1cb77..ca68b6cc3dd2e 100644 --- a/tests/system/providers/asana/example_asana.py +++ b/providers/tests/system/asana/example_asana.py @@ -100,13 +100,13 @@ create >> find >> update >> delete # [END asana_example_dag] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/ydb/hooks/__init__.py b/providers/tests/system/cncf/__init__.py similarity index 100% rename from tests/providers/ydb/hooks/__init__.py rename to providers/tests/system/cncf/__init__.py diff --git a/tests/providers/ydb/operators/__init__.py b/providers/tests/system/cncf/kubernetes/__init__.py similarity index 100% rename from tests/providers/ydb/operators/__init__.py rename to providers/tests/system/cncf/kubernetes/__init__.py diff --git a/tests/system/providers/cncf/kubernetes/example_kubernetes.py b/providers/tests/system/cncf/kubernetes/example_kubernetes.py similarity index 97% rename from tests/system/providers/cncf/kubernetes/example_kubernetes.py rename to providers/tests/system/cncf/kubernetes/example_kubernetes.py index 3756d0c4e21d8..bed3b9da746b9 100644 --- a/tests/system/providers/cncf/kubernetes/example_kubernetes.py +++ b/providers/tests/system/cncf/kubernetes/example_kubernetes.py @@ -167,13 +167,13 @@ write_xcom >> pod_task_xcom_result # [END howto_operator_k8s_write_xcom] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run 
the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/cncf/kubernetes/example_kubernetes_async.py b/providers/tests/system/cncf/kubernetes/example_kubernetes_async.py similarity index 98% rename from tests/system/providers/cncf/kubernetes/example_kubernetes_async.py rename to providers/tests/system/cncf/kubernetes/example_kubernetes_async.py index cb3d25a33fcbc..43eb0a1f919cf 100644 --- a/tests/system/providers/cncf/kubernetes/example_kubernetes_async.py +++ b/providers/tests/system/cncf/kubernetes/example_kubernetes_async.py @@ -198,13 +198,13 @@ write_xcom_async >> pod_task_xcom_result_async # [END howto_operator_k8s_write_xcom_async] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/cncf/kubernetes/example_kubernetes_decorator.py b/providers/tests/system/cncf/kubernetes/example_kubernetes_decorator.py similarity index 97% rename from tests/system/providers/cncf/kubernetes/example_kubernetes_decorator.py rename to providers/tests/system/cncf/kubernetes/example_kubernetes_decorator.py index 20fd7d5f74fa7..088a3fdae2ede 100644 --- a/tests/system/providers/cncf/kubernetes/example_kubernetes_decorator.py +++ b/providers/tests/system/cncf/kubernetes/example_kubernetes_decorator.py @@ -63,7 +63,7 @@ def print_pattern(): # [END howto_operator_kubernetes] -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/cncf/kubernetes/example_kubernetes_job.py b/providers/tests/system/cncf/kubernetes/example_kubernetes_job.py similarity index 95% rename from tests/system/providers/cncf/kubernetes/example_kubernetes_job.py rename to providers/tests/system/cncf/kubernetes/example_kubernetes_job.py index bf017120674bc..1a2432c135898 100644 --- a/tests/system/providers/cncf/kubernetes/example_kubernetes_job.py +++ b/providers/tests/system/cncf/kubernetes/example_kubernetes_job.py @@ -94,13 +94,13 @@ k8s_job >> update_job >> delete_job_task k8s_job_def >> delete_job_task_def - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/cncf/kubernetes/example_kubernetes_resource.py b/providers/tests/system/cncf/kubernetes/example_kubernetes_resource.py similarity index 94% rename from tests/system/providers/cncf/kubernetes/example_kubernetes_resource.py rename to providers/tests/system/cncf/kubernetes/example_kubernetes_resource.py index 
2133d6e7a79af..1c24a533a85f0 100644 --- a/tests/system/providers/cncf/kubernetes/example_kubernetes_resource.py +++ b/providers/tests/system/cncf/kubernetes/example_kubernetes_resource.py @@ -72,13 +72,13 @@ t1 >> t2 - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/cncf/kubernetes/example_spark_kubernetes.py b/providers/tests/system/cncf/kubernetes/example_spark_kubernetes.py similarity index 95% rename from tests/system/providers/cncf/kubernetes/example_spark_kubernetes.py rename to providers/tests/system/cncf/kubernetes/example_spark_kubernetes.py index 4584d653e4fa1..072cffeb90f04 100644 --- a/tests/system/providers/cncf/kubernetes/example_spark_kubernetes.py +++ b/providers/tests/system/cncf/kubernetes/example_spark_kubernetes.py @@ -76,13 +76,13 @@ t1 >> t2 # [END SparkKubernetesOperator_DAG] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/cncf/kubernetes/example_spark_kubernetes_spark_pi.yaml b/providers/tests/system/cncf/kubernetes/example_spark_kubernetes_spark_pi.yaml similarity index 100% rename from tests/system/providers/cncf/kubernetes/example_spark_kubernetes_spark_pi.yaml rename to providers/tests/system/cncf/kubernetes/example_spark_kubernetes_spark_pi.yaml diff --git a/tests/system/providers/cncf/kubernetes/spark_job_template.yaml b/providers/tests/system/cncf/kubernetes/spark_job_template.yaml similarity index 100% rename from tests/system/providers/cncf/kubernetes/spark_job_template.yaml rename to providers/tests/system/cncf/kubernetes/spark_job_template.yaml diff --git a/tests/providers/ydb/utils/__init__.py b/providers/tests/system/cohere/__init__.py similarity index 100% rename from tests/providers/ydb/utils/__init__.py rename to providers/tests/system/cohere/__init__.py diff --git a/tests/system/providers/cohere/example_cohere_embedding_operator.py b/providers/tests/system/cohere/example_cohere_embedding_operator.py similarity index 96% rename from tests/system/providers/cohere/example_cohere_embedding_operator.py rename to providers/tests/system/cohere/example_cohere_embedding_operator.py index ec97ee91e57cb..9686724f2cea9 100644 --- a/tests/system/providers/cohere/example_cohere_embedding_operator.py +++ b/providers/tests/system/cohere/example_cohere_embedding_operator.py @@ -34,7 +34,7 @@ # [END howto_operator_cohere_embedding] -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git 
a/tests/providers/zendesk/__init__.py b/providers/tests/system/common/__init__.py similarity index 100% rename from tests/providers/zendesk/__init__.py rename to providers/tests/system/common/__init__.py diff --git a/tests/providers/zendesk/hooks/__init__.py b/providers/tests/system/common/io/__init__.py similarity index 100% rename from tests/providers/zendesk/hooks/__init__.py rename to providers/tests/system/common/io/__init__.py diff --git a/tests/system/providers/common/io/example_file_transfer_local_to_s3.py b/providers/tests/system/common/io/example_file_transfer_local_to_s3.py similarity index 94% rename from tests/system/providers/common/io/example_file_transfer_local_to_s3.py rename to providers/tests/system/common/io/example_file_transfer_local_to_s3.py index 13c495e620972..0b90a75f2e6e2 100644 --- a/tests/system/providers/common/io/example_file_transfer_local_to_s3.py +++ b/providers/tests/system/common/io/example_file_transfer_local_to_s3.py @@ -76,12 +76,12 @@ def remove_bucket(): temp_file >> transfer >> remove_bucket() >> delete_temp_file(temp_file_path) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/airbyte/__init__.py b/providers/tests/system/common/sql/__init__.py similarity index 100% rename from tests/system/providers/airbyte/__init__.py rename to providers/tests/system/common/sql/__init__.py diff --git a/tests/system/providers/common/sql/example_sql_column_table_check.py b/providers/tests/system/common/sql/example_sql_column_table_check.py similarity index 97% rename from tests/system/providers/common/sql/example_sql_column_table_check.py rename to providers/tests/system/common/sql/example_sql_column_table_check.py index 2790d092a2b47..c48dc60bfc639 100644 --- a/tests/system/providers/common/sql/example_sql_column_table_check.py +++ b/providers/tests/system/common/sql/example_sql_column_table_check.py @@ -79,7 +79,7 @@ column_check >> row_count_check -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/common/sql/example_sql_execute_query.py b/providers/tests/system/common/sql/example_sql_execute_query.py similarity index 96% rename from tests/system/providers/common/sql/example_sql_execute_query.py rename to providers/tests/system/common/sql/example_sql_execute_query.py index 694ca3f2ef99c..8dced19cef719 100644 --- a/tests/system/providers/common/sql/example_sql_execute_query.py +++ b/providers/tests/system/common/sql/example_sql_execute_query.py @@ -56,7 +56,7 @@ # [END howto_operator_sql_execute_query] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/alibaba/__init__.py b/providers/tests/system/databricks/__init__.py similarity index 100% rename from tests/system/providers/alibaba/__init__.py rename to 
providers/tests/system/databricks/__init__.py diff --git a/tests/system/providers/databricks/example_databricks.py b/providers/tests/system/databricks/example_databricks.py similarity index 98% rename from tests/system/providers/databricks/example_databricks.py rename to providers/tests/system/databricks/example_databricks.py index 82c5d313421a9..00d9969bd99b3 100644 --- a/tests/system/providers/databricks/example_databricks.py +++ b/providers/tests/system/databricks/example_databricks.py @@ -238,13 +238,13 @@ ) # [END howto_operator_databricks_task_sql] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/databricks/example_databricks_repos.py b/providers/tests/system/databricks/example_databricks_repos.py similarity index 95% rename from tests/system/providers/databricks/example_databricks_repos.py rename to providers/tests/system/databricks/example_databricks_repos.py index dd583c67b9c90..d07226e76ed01 100644 --- a/tests/system/providers/databricks/example_databricks_repos.py +++ b/providers/tests/system/databricks/example_databricks_repos.py @@ -78,13 +78,13 @@ (create_repo >> update_repo >> notebook_task >> delete_repo) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/databricks/example_databricks_sensors.py b/providers/tests/system/databricks/example_databricks_sensors.py similarity index 96% rename from tests/system/providers/databricks/example_databricks_sensors.py rename to providers/tests/system/databricks/example_databricks_sensors.py index fd572a6bd9055..ea233a79395bc 100644 --- a/tests/system/providers/databricks/example_databricks_sensors.py +++ b/providers/tests/system/databricks/example_databricks_sensors.py @@ -88,7 +88,7 @@ # runs, else all the subsequent DAG tasks and the DAG are marked as failed. 
(sql_sensor >> partition_sensor) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This example does not need a watcher in order to properly mark success/failure # since it is a single task, but it is given here as an example for users to @@ -96,7 +96,7 @@ # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/databricks/example_databricks_sql.py b/providers/tests/system/databricks/example_databricks_sql.py similarity index 97% rename from tests/system/providers/databricks/example_databricks_sql.py rename to providers/tests/system/databricks/example_databricks_sql.py index 3f7ed0858c82e..f08ce3cd56b65 100644 --- a/tests/system/providers/databricks/example_databricks_sql.py +++ b/providers/tests/system/databricks/example_databricks_sql.py @@ -113,13 +113,13 @@ (create >> create_file >> import_csv >> select >> select_into_file) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/databricks/example_databricks_workflow.py b/providers/tests/system/databricks/example_databricks_workflow.py similarity index 97% rename from tests/system/providers/databricks/example_databricks_workflow.py rename to providers/tests/system/databricks/example_databricks_workflow.py index 1cfd81e9c7ec0..3a6c829520d89 100644 --- a/tests/system/providers/databricks/example_databricks_workflow.py +++ b/providers/tests/system/databricks/example_databricks_workflow.py @@ -143,13 +143,13 @@ notebook_1 >> notebook_2 >> task_operator_nb_1 >> sql_query # [END howto_databricks_workflow_notebook] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/amazon/aws/tests/__init__.py b/providers/tests/system/dbt/__init__.py similarity index 100% rename from tests/system/providers/amazon/aws/tests/__init__.py rename to providers/tests/system/dbt/__init__.py diff --git a/tests/system/providers/apache/cassandra/__init__.py b/providers/tests/system/dbt/cloud/__init__.py similarity index 100% rename from tests/system/providers/apache/cassandra/__init__.py rename to providers/tests/system/dbt/cloud/__init__.py diff --git a/tests/system/providers/dbt/cloud/example_dbt_cloud.py b/providers/tests/system/dbt/cloud/example_dbt_cloud.py similarity index 94% rename from 
tests/system/providers/dbt/cloud/example_dbt_cloud.py rename to providers/tests/system/dbt/cloud/example_dbt_cloud.py index 897abb293b5de..17f8a59efea02 100644 --- a/tests/system/providers/dbt/cloud/example_dbt_cloud.py +++ b/providers/tests/system/dbt/cloud/example_dbt_cloud.py @@ -27,7 +27,8 @@ ) from airflow.providers.dbt.cloud.sensors.dbt import DbtCloudJobRunSensor from airflow.utils.edgemodifier import Label -from tests.system.utils import get_test_env_id + +from dev.tests_common.test_utils.system_tests import get_test_env_id ENV_ID = get_test_env_id() DAG_ID = "example_dbt_cloud" @@ -91,13 +92,13 @@ # trigger_job_run2 >> job_run_sensor # trigger_job_run2 >> job_run_sensor_deferred - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/drill/__init__.py b/providers/tests/system/dingding/__init__.py similarity index 100% rename from tests/system/providers/apache/drill/__init__.py rename to providers/tests/system/dingding/__init__.py diff --git a/tests/system/providers/dingding/example_dingding.py b/providers/tests/system/dingding/example_dingding.py similarity index 98% rename from tests/system/providers/dingding/example_dingding.py rename to providers/tests/system/dingding/example_dingding.py index 240a732805d3e..4ecb41c68cc91 100644 --- a/tests/system/providers/dingding/example_dingding.py +++ b/providers/tests/system/dingding/example_dingding.py @@ -199,13 +199,13 @@ def failure_callback(context): >> msg_failure_callback ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/druid/__init__.py b/providers/tests/system/docker/__init__.py similarity index 100% rename from tests/system/providers/apache/druid/__init__.py rename to providers/tests/system/docker/__init__.py diff --git a/tests/system/providers/docker/example_docker.py b/providers/tests/system/docker/example_docker.py similarity index 96% rename from tests/system/providers/docker/example_docker.py rename to providers/tests/system/docker/example_docker.py index 18f7d2f0ea0c6..108813236ac62 100644 --- a/tests/system/providers/docker/example_docker.py +++ b/providers/tests/system/docker/example_docker.py @@ -57,7 +57,7 @@ t1 >> [t2, t3] >> t4 ) -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/docker/example_docker_copy_data.py b/providers/tests/system/docker/example_docker_copy_data.py 
similarity index 97% rename from tests/system/providers/docker/example_docker_copy_data.py rename to providers/tests/system/docker/example_docker_copy_data.py index 4e4e8466e501f..c270408390cfa 100644 --- a/tests/system/providers/docker/example_docker_copy_data.py +++ b/providers/tests/system/docker/example_docker_copy_data.py @@ -103,7 +103,7 @@ t_is_data_available >> t_move >> t_print ) -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/docker/example_docker_swarm.py b/providers/tests/system/docker/example_docker_swarm.py similarity index 95% rename from tests/system/providers/docker/example_docker_swarm.py rename to providers/tests/system/docker/example_docker_swarm.py index 6546c7d434aca..3a68dcea81673 100644 --- a/tests/system/providers/docker/example_docker_swarm.py +++ b/providers/tests/system/docker/example_docker_swarm.py @@ -47,7 +47,7 @@ t1 ) -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/docker/example_taskflow_api_docker_virtualenv.py b/providers/tests/system/docker/example_taskflow_api_docker_virtualenv.py similarity index 97% rename from tests/system/providers/docker/example_taskflow_api_docker_virtualenv.py rename to providers/tests/system/docker/example_taskflow_api_docker_virtualenv.py index 95c502a221073..3c8b528606e02 100644 --- a/tests/system/providers/docker/example_taskflow_api_docker_virtualenv.py +++ b/providers/tests/system/docker/example_taskflow_api_docker_virtualenv.py @@ -116,7 +116,7 @@ def load(total_order_value: float): tutorial_dag = tutorial_taskflow_api_docker_virtualenv() # [END dag_invocation] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/kafka/__init__.py b/providers/tests/system/elasticsearch/__init__.py similarity index 100% rename from tests/system/providers/apache/kafka/__init__.py rename to providers/tests/system/elasticsearch/__init__.py diff --git a/tests/system/providers/elasticsearch/example_elasticsearch_query.py b/providers/tests/system/elasticsearch/example_elasticsearch_query.py similarity index 97% rename from tests/system/providers/elasticsearch/example_elasticsearch_query.py rename to providers/tests/system/elasticsearch/example_elasticsearch_query.py index 31e9440e6cbed..93471d874c3c9 100644 --- a/tests/system/providers/elasticsearch/example_elasticsearch_query.py +++ b/providers/tests/system/elasticsearch/example_elasticsearch_query.py @@ -81,7 +81,7 @@ def use_elasticsearch_hook(): task_id="print_data_from_elasticsearch", python_callable=use_elasticsearch_hook ) -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/kylin/__init__.py 
b/providers/tests/system/ftp/__init__.py similarity index 100% rename from tests/system/providers/apache/kylin/__init__.py rename to providers/tests/system/ftp/__init__.py diff --git a/tests/system/providers/ftp/example_ftp.py b/providers/tests/system/ftp/example_ftp.py similarity index 95% rename from tests/system/providers/ftp/example_ftp.py rename to providers/tests/system/ftp/example_ftp.py index bdc9399e46887..8fb2e38806ae4 100644 --- a/tests/system/providers/ftp/example_ftp.py +++ b/providers/tests/system/ftp/example_ftp.py @@ -87,13 +87,13 @@ ftp_put >> ftp_get ftps_put >> ftps_get - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/livy/__init__.py b/providers/tests/system/github/__init__.py similarity index 100% rename from tests/system/providers/apache/livy/__init__.py rename to providers/tests/system/github/__init__.py diff --git a/tests/system/providers/github/example_github.py b/providers/tests/system/github/example_github.py similarity index 97% rename from tests/system/providers/github/example_github.py rename to providers/tests/system/github/example_github.py index 81a458021aa67..70eb8bf902f04 100644 --- a/tests/system/providers/github/example_github.py +++ b/providers/tests/system/github/example_github.py @@ -100,7 +100,7 @@ def tag_checker(repo: Any, tag_name: str) -> bool | None: # [END howto_operator_list_tags_github] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/README.md b/providers/tests/system/google/README.md similarity index 100% rename from tests/system/providers/google/README.md rename to providers/tests/system/google/README.md diff --git a/tests/system/providers/google/__init__.py b/providers/tests/system/google/__init__.py similarity index 100% rename from tests/system/providers/google/__init__.py rename to providers/tests/system/google/__init__.py diff --git a/tests/system/providers/apache/pig/__init__.py b/providers/tests/system/google/ads/__init__.py similarity index 100% rename from tests/system/providers/apache/pig/__init__.py rename to providers/tests/system/google/ads/__init__.py diff --git a/tests/system/providers/google/ads/example_ads.py b/providers/tests/system/google/ads/example_ads.py similarity index 95% rename from tests/system/providers/google/ads/example_ads.py rename to providers/tests/system/google/ads/example_ads.py index 0f4e6d3a3f4a7..162fd45334f67 100644 --- a/tests/system/providers/google/ads/example_ads.py +++ b/providers/tests/system/google/ads/example_ads.py @@ -29,7 +29,8 @@ from airflow.providers.google.ads.transfers.ads_to_gcs import GoogleAdsToGcsOperator from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import 
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [START howto_google_ads_env_variables] ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") @@ -116,14 +117,14 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/apache/pinot/__init__.py b/providers/tests/system/google/cloud/__init__.py similarity index 100% rename from tests/system/providers/apache/pinot/__init__.py rename to providers/tests/system/google/cloud/__init__.py diff --git a/tests/system/providers/apache/spark/__init__.py b/providers/tests/system/google/cloud/automl/__init__.py similarity index 100% rename from tests/system/providers/apache/spark/__init__.py rename to providers/tests/system/google/cloud/automl/__init__.py diff --git a/tests/system/providers/google/cloud/automl/example_automl_dataset.py b/providers/tests/system/google/cloud/automl/example_automl_dataset.py similarity index 97% rename from tests/system/providers/google/cloud/automl/example_automl_dataset.py rename to providers/tests/system/google/cloud/automl/example_automl_dataset.py index f90caf32df0c4..49de111ffb358 100644 --- a/tests/system/providers/google/cloud/automl/example_automl_dataset.py +++ b/providers/tests/system/google/cloud/automl/example_automl_dataset.py @@ -162,14 +162,14 @@ def upload_updated_csv_file_to_gcs(): >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/automl/example_automl_translation.py b/providers/tests/system/google/cloud/automl/example_automl_translation.py similarity index 97% rename from tests/system/providers/google/cloud/automl/example_automl_translation.py rename to providers/tests/system/google/cloud/automl/example_automl_translation.py index cda70693fb57c..60dbf782f281e 100644 --- a/tests/system/providers/google/cloud/automl/example_automl_translation.py +++ b/providers/tests/system/google/cloud/automl/example_automl_translation.py @@ -188,14 +188,14 @@ def upload_csv_file_to_gcs(): >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = 
get_test_run(dag) diff --git a/tests/system/providers/google/cloud/automl/example_automl_video_classification.py b/providers/tests/system/google/cloud/automl/example_automl_video_classification.py similarity index 97% rename from tests/system/providers/google/cloud/automl/example_automl_video_classification.py rename to providers/tests/system/google/cloud/automl/example_automl_video_classification.py index 21e4c4623768d..36853c3826037 100644 --- a/tests/system/providers/google/cloud/automl/example_automl_video_classification.py +++ b/providers/tests/system/google/cloud/automl/example_automl_video_classification.py @@ -158,13 +158,13 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/automl/example_automl_video_tracking.py b/providers/tests/system/google/cloud/automl/example_automl_video_tracking.py similarity index 97% rename from tests/system/providers/google/cloud/automl/example_automl_video_tracking.py rename to providers/tests/system/google/cloud/automl/example_automl_video_tracking.py index 36133b3674a89..65718f10b7697 100644 --- a/tests/system/providers/google/cloud/automl/example_automl_video_tracking.py +++ b/providers/tests/system/google/cloud/automl/example_automl_video_tracking.py @@ -158,13 +158,13 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/automl/example_automl_vision_classification.py b/providers/tests/system/google/cloud/automl/example_automl_vision_classification.py similarity index 97% rename from tests/system/providers/google/cloud/automl/example_automl_vision_classification.py rename to providers/tests/system/google/cloud/automl/example_automl_vision_classification.py index 2e21f252d8185..40e68d3291ce8 100644 --- a/tests/system/providers/google/cloud/automl/example_automl_vision_classification.py +++ b/providers/tests/system/google/cloud/automl/example_automl_vision_classification.py @@ -132,13 +132,13 @@ >> delete_image_dataset ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git 
a/tests/system/providers/google/cloud/automl/example_automl_vision_object_detection.py b/providers/tests/system/google/cloud/automl/example_automl_vision_object_detection.py similarity index 97% rename from tests/system/providers/google/cloud/automl/example_automl_vision_object_detection.py rename to providers/tests/system/google/cloud/automl/example_automl_vision_object_detection.py index 2f92e81d9aaf9..334e154cea4d1 100644 --- a/tests/system/providers/google/cloud/automl/example_automl_vision_object_detection.py +++ b/providers/tests/system/google/cloud/automl/example_automl_vision_object_detection.py @@ -134,13 +134,13 @@ >> delete_image_dataset ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/asana/__init__.py b/providers/tests/system/google/cloud/automl/resources/__init__.py similarity index 100% rename from tests/system/providers/asana/__init__.py rename to providers/tests/system/google/cloud/automl/resources/__init__.py diff --git a/tests/system/providers/cncf/__init__.py b/providers/tests/system/google/cloud/azure/__init__.py similarity index 100% rename from tests/system/providers/cncf/__init__.py rename to providers/tests/system/google/cloud/azure/__init__.py diff --git a/tests/system/providers/google/cloud/azure/example_azure_blob_to_gcs.py b/providers/tests/system/google/cloud/azure/example_azure_blob_to_gcs.py similarity index 94% rename from tests/system/providers/google/cloud/azure/example_azure_blob_to_gcs.py rename to providers/tests/system/google/cloud/azure/example_azure_blob_to_gcs.py index 265b9e0a4ab86..837cb99e5ffc1 100644 --- a/tests/system/providers/google/cloud/azure/example_azure_blob_to_gcs.py +++ b/providers/tests/system/google/cloud/azure/example_azure_blob_to_gcs.py @@ -59,13 +59,13 @@ (wait_for_blob >> transfer_files_to_gcs) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/azure/example_azure_fileshare_to_gcs.py b/providers/tests/system/google/cloud/azure/example_azure_fileshare_to_gcs.py similarity index 93% rename from tests/system/providers/google/cloud/azure/example_azure_fileshare_to_gcs.py rename to providers/tests/system/google/cloud/azure/example_azure_fileshare_to_gcs.py index 80eaadd6c6d6d..a897b6dc7d3d1 100644 --- a/tests/system/providers/google/cloud/azure/example_azure_fileshare_to_gcs.py +++ b/providers/tests/system/google/cloud/azure/example_azure_fileshare_to_gcs.py @@ -23,7 +23,8 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from 
airflow.providers.google.cloud.transfers.azure_fileshare_to_gcs import AzureFileShareToGCSOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -80,14 +81,14 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/cncf/kubernetes/__init__.py b/providers/tests/system/google/cloud/bigquery/__init__.py similarity index 100% rename from tests/system/providers/cncf/kubernetes/__init__.py rename to providers/tests/system/google/cloud/bigquery/__init__.py diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_dataset.py similarity index 95% rename from tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py rename to providers/tests/system/google/cloud/bigquery/example_bigquery_dataset.py index 004f996975be1..87e13a2c25165 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_dataset.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_dataset.py @@ -85,14 +85,14 @@ >> delete_dataset ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_dts.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_dts.py similarity index 97% rename from tests/system/providers/google/cloud/bigquery/example_bigquery_dts.py rename to providers/tests/system/google/cloud/bigquery/example_bigquery_dts.py index 3f75d43935131..b387563069a94 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_dts.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_dts.py @@ -44,7 +44,8 @@ from airflow.providers.google.cloud.sensors.bigquery_dts import BigQueryDataTransferServiceTransferRunSensor from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -179,14 +180,14 @@ delete_bucket, ) - 
from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_operations.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_operations.py similarity index 96% rename from tests/system/providers/google/cloud/bigquery/example_bigquery_operations.py rename to providers/tests/system/google/cloud/bigquery/example_bigquery_operations.py index a394c882beded..5e487f5a93e2a 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_operations.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_operations.py @@ -97,14 +97,14 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_operations_location.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_operations_location.py similarity index 95% rename from tests/system/providers/google/cloud/bigquery/example_bigquery_operations_location.py rename to providers/tests/system/google/cloud/bigquery/example_bigquery_operations_location.py index ac5b4dd967b30..2b026070a06b4 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_operations_location.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_operations_location.py @@ -77,14 +77,14 @@ >> delete_dataset_with_location ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py similarity index 97% rename from tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py rename to providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py index ab7a4b3757b9b..cd31ea05bcdcb 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_queries.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py @@ -39,7 +39,8 @@ ) from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from 
tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -246,7 +247,7 @@ execute_insert_query >> [check_count, check_value, check_interval] >> delete_dataset execute_insert_query >> [column_check, table_check] >> delete_dataset - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG @@ -256,7 +257,7 @@ globals()[DAG_ID] = dag for dag in DAGS_LIST: - from tests.system.utils import get_test_run + from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py similarity index 98% rename from tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py rename to providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py index a007e1cd639c0..012eba080d1a4 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_queries_async.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py @@ -259,14 +259,14 @@ insert_query_job >> execute_long_running_query >> check_value >> check_interval [check_count, check_interval, bigquery_execute_multi_query, get_data_result] >> delete_dataset - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py similarity index 97% rename from tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py rename to providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py index 57cde5e9aa824..aa8d36f5ed893 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py @@ -165,14 +165,14 @@ >> delete_dataset ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py 
b/providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py similarity index 97% rename from tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py rename to providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py index 3539da742a2ce..9c1432a0a8da5 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_tables.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py @@ -41,7 +41,8 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "bigquery_tables" @@ -223,14 +224,14 @@ >> delete_dataset ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_bigquery.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_bigquery.py similarity index 96% rename from tests/system/providers/google/cloud/bigquery/example_bigquery_to_bigquery.py rename to providers/tests/system/google/cloud/bigquery/example_bigquery_to_bigquery.py index 67ad5da80256f..cb1fa63498da9 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_bigquery.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_bigquery.py @@ -100,14 +100,14 @@ >> delete_dataset ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_gcs.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs.py similarity index 94% rename from tests/system/providers/google/cloud/bigquery/example_bigquery_to_gcs.py rename to providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs.py index 8edeb1fb9f9d0..21acb27f161db 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_gcs.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs.py @@ -33,7 +33,8 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.bigquery_to_gcs import BigQueryToGCSOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import 
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "bigquery_to_gcs" @@ -97,14 +98,14 @@ >> [delete_bucket, delete_dataset] ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_gcs_async.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs_async.py similarity index 94% rename from tests/system/providers/google/cloud/bigquery/example_bigquery_to_gcs_async.py rename to providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs_async.py index c2ba4bf287d1d..fdf084601c770 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_gcs_async.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs_async.py @@ -33,7 +33,8 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.transfers.bigquery_to_gcs import BigQueryToGCSOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "bigquery_to_gcs_async" @@ -96,14 +97,14 @@ >> [delete_bucket, delete_dataset] ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_mssql.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_mssql.py similarity index 98% rename from tests/system/providers/google/cloud/bigquery/example_bigquery_to_mssql.py rename to providers/tests/system/google/cloud/bigquery/example_bigquery_to_mssql.py index e9b3269ecfb6c..51e713560bc23 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_mssql.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_mssql.py @@ -326,13 +326,13 @@ def delete_connection(connection_id: str) -> None: >> delete_persistent_disk ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest 
(see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_mysql.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_mysql.py similarity index 95% rename from tests/system/providers/google/cloud/bigquery/example_bigquery_to_mysql.py rename to providers/tests/system/google/cloud/bigquery/example_bigquery_to_mysql.py index cacd94a42a6b5..a3b726098773d 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_mysql.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_mysql.py @@ -88,13 +88,13 @@ >> delete_dataset ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_postgres.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_postgres.py similarity index 98% rename from tests/system/providers/google/cloud/bigquery/example_bigquery_to_postgres.py rename to providers/tests/system/google/cloud/bigquery/example_bigquery_to_postgres.py index 4a3b0386da0f4..ca4e3148d0a18 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_to_postgres.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_postgres.py @@ -362,13 +362,13 @@ def delete_connection(connection_id: str) -> None: >> delete_persistent_disk ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_transfer.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_transfer.py similarity index 95% rename from tests/system/providers/google/cloud/bigquery/example_bigquery_transfer.py rename to providers/tests/system/google/cloud/bigquery/example_bigquery_transfer.py index e88858ef8c0b3..73d55710c9db9 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_transfer.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_transfer.py @@ -34,7 +34,8 @@ from airflow.providers.google.cloud.transfers.bigquery_to_bigquery import BigQueryToBigQueryOperator from airflow.providers.google.cloud.transfers.bigquery_to_gcs import BigQueryToGCSOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -116,13 +117,13 @@ >> delete_bucket ) - 
from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_value_check.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_value_check.py similarity index 95% rename from tests/system/providers/google/cloud/bigquery/example_bigquery_value_check.py rename to providers/tests/system/google/cloud/bigquery/example_bigquery_value_check.py index 4a14ac6a810b3..3872ca092321f 100644 --- a/tests/system/providers/google/cloud/bigquery/example_bigquery_value_check.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_value_check.py @@ -131,8 +131,8 @@ >> delete_dataset ) - from tests.system.utils import get_test_run - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.system_tests import get_test_run + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG @@ -142,7 +142,7 @@ test_run = get_test_run(dag) -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/cohere/__init__.py b/providers/tests/system/google/cloud/bigquery/resources/__init__.py similarity index 100% rename from tests/system/providers/cohere/__init__.py rename to providers/tests/system/google/cloud/bigquery/resources/__init__.py diff --git a/tests/system/providers/google/cloud/bigquery/resources/example_bigquery_query.sql b/providers/tests/system/google/cloud/bigquery/resources/example_bigquery_query.sql similarity index 100% rename from tests/system/providers/google/cloud/bigquery/resources/example_bigquery_query.sql rename to providers/tests/system/google/cloud/bigquery/resources/example_bigquery_query.sql diff --git a/tests/system/providers/google/cloud/bigquery/resources/update_table_schema.json b/providers/tests/system/google/cloud/bigquery/resources/update_table_schema.json similarity index 100% rename from tests/system/providers/google/cloud/bigquery/resources/update_table_schema.json rename to providers/tests/system/google/cloud/bigquery/resources/update_table_schema.json diff --git a/tests/system/providers/google/cloud/bigquery/resources/us-states.csv b/providers/tests/system/google/cloud/bigquery/resources/us-states.csv similarity index 100% rename from tests/system/providers/google/cloud/bigquery/resources/us-states.csv rename to providers/tests/system/google/cloud/bigquery/resources/us-states.csv diff --git a/tests/system/providers/common/__init__.py b/providers/tests/system/google/cloud/bigtable/__init__.py similarity index 100% rename from tests/system/providers/common/__init__.py rename to providers/tests/system/google/cloud/bigtable/__init__.py diff --git a/tests/system/providers/google/cloud/bigtable/example_bigtable.py 
b/providers/tests/system/google/cloud/bigtable/example_bigtable.py similarity index 97% rename from tests/system/providers/google/cloud/bigtable/example_bigtable.py rename to providers/tests/system/google/cloud/bigtable/example_bigtable.py index 1c690f2ddc721..77abc49fae258 100644 --- a/tests/system/providers/google/cloud/bigtable/example_bigtable.py +++ b/providers/tests/system/google/cloud/bigtable/example_bigtable.py @@ -61,7 +61,8 @@ ) from airflow.providers.google.cloud.sensors.bigtable import BigtableTableReplicationCompletedSensor from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -224,14 +225,14 @@ def update_clusters_and_instance(): >> [delete_instance_task, delete_instance_task2] ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/common/io/__init__.py b/providers/tests/system/google/cloud/cloud_batch/__init__.py similarity index 100% rename from tests/system/providers/common/io/__init__.py rename to providers/tests/system/google/cloud/cloud_batch/__init__.py diff --git a/tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py b/providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py similarity index 96% rename from tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py rename to providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py index 58d852a286bc8..84dc031a5268f 100644 --- a/tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py +++ b/providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py @@ -35,7 +35,8 @@ CloudBatchSubmitJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") @@ -186,13 +187,13 @@ def _create_job(): ([submit1, submit2] >> list_tasks >> assert_tasks >> list_jobs >> get_name >> [delete_job1, delete_job2]) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/common/sql/__init__.py b/providers/tests/system/google/cloud/cloud_build/__init__.py similarity 
index 100% rename from tests/system/providers/common/sql/__init__.py rename to providers/tests/system/google/cloud/cloud_build/__init__.py diff --git a/tests/system/providers/google/cloud/cloud_build/example_cloud_build.py b/providers/tests/system/google/cloud/cloud_build/example_cloud_build.py similarity index 97% rename from tests/system/providers/google/cloud/cloud_build/example_cloud_build.py rename to providers/tests/system/google/cloud/cloud_build/example_cloud_build.py index cb31a3b4d091d..bf2391413a1c3 100644 --- a/tests/system/providers/google/cloud/cloud_build/example_cloud_build.py +++ b/providers/tests/system/google/cloud/cloud_build/example_cloud_build.py @@ -39,7 +39,8 @@ CloudBuildRetryBuildOperator, ) from airflow.providers.standard.operators.bash import BashOperator -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -51,7 +52,7 @@ # If you'd like to run this system test locally, please # 1. Create Cloud Source Repository # 2. Push into a master branch the following file: -# tests/system/providers/google/cloud/cloud_build/resources/example_cloud_build.yaml +# providers/tests/system/google/cloud/cloud_build/resources/example_cloud_build.yaml GCP_SOURCE_REPOSITORY_NAME = "test-cloud-build-repository" CURRENT_FOLDER = Path(__file__).parent @@ -266,14 +267,14 @@ def no_wait_cancel_retry_get_deferrable(): ] ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py b/providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py similarity index 96% rename from tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py rename to providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py index 6c2c64ba6283a..f6873da9c5680 100644 --- a/tests/system/providers/google/cloud/cloud_build/example_cloud_build_trigger.py +++ b/providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py @@ -41,7 +41,8 @@ CloudBuildUpdateBuildTriggerOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -52,7 +53,7 @@ # If you'd like to run this system test locally, please # 1. Create Cloud Source Repository # 2. 
Push into a master branch the following file: -# tests/system/providers/google/cloud/cloud_build/resources/example_cloud_build.yaml +# providers/tests/system/google/cloud/cloud_build/resources/example_cloud_build.yaml GCP_SOURCE_REPOSITORY_NAME = "test-cloud-build-repository" TRIGGER_NAME = f"cloud-build-trigger-{ENV_ID}".replace("_", "-") @@ -181,14 +182,14 @@ def get_project_number(): >> list_build_triggers ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/databricks/__init__.py b/providers/tests/system/google/cloud/cloud_build/resources/__init__.py similarity index 100% rename from tests/system/providers/databricks/__init__.py rename to providers/tests/system/google/cloud/cloud_build/resources/__init__.py diff --git a/tests/system/providers/google/cloud/cloud_build/resources/example_cloud_build.yaml b/providers/tests/system/google/cloud/cloud_build/resources/example_cloud_build.yaml similarity index 100% rename from tests/system/providers/google/cloud/cloud_build/resources/example_cloud_build.yaml rename to providers/tests/system/google/cloud/cloud_build/resources/example_cloud_build.yaml diff --git a/tests/system/providers/dbt/cloud/__init__.py b/providers/tests/system/google/cloud/cloud_functions/__init__.py similarity index 100% rename from tests/system/providers/dbt/cloud/__init__.py rename to providers/tests/system/google/cloud/cloud_functions/__init__.py diff --git a/tests/system/providers/google/cloud/cloud_functions/example_functions.py b/providers/tests/system/google/cloud/cloud_functions/example_functions.py similarity index 95% rename from tests/system/providers/google/cloud/cloud_functions/example_functions.py rename to providers/tests/system/google/cloud/cloud_functions/example_functions.py index 84e31bc745178..fd0b8ea5f5a4c 100644 --- a/tests/system/providers/google/cloud/cloud_functions/example_functions.py +++ b/providers/tests/system/google/cloud/cloud_functions/example_functions.py @@ -34,7 +34,8 @@ CloudFunctionDeployFunctionOperator, CloudFunctionInvokeFunctionOperator, ) -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") @@ -121,14 +122,14 @@ delete_function, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/dingding/__init__.py b/providers/tests/system/google/cloud/cloud_memorystore/__init__.py similarity index 
100% rename from tests/system/providers/dingding/__init__.py rename to providers/tests/system/google/cloud/cloud_memorystore/__init__.py diff --git a/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py b/providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py similarity index 98% rename from tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py rename to providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py index 4884122751e09..670a850b04ac4 100644 --- a/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py +++ b/providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py @@ -198,14 +198,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py b/providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py similarity index 97% rename from tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py rename to providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py index c46d966371dac..3d8ebf6f287dc 100644 --- a/tests/system/providers/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +++ b/providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py @@ -47,7 +47,8 @@ ) from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") ENV_ID_LOWER = ENV_ID.lower() if ENV_ID else "" @@ -265,14 +266,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/docker/__init__.py b/providers/tests/system/google/cloud/cloud_run/__init__.py similarity index 100% rename from tests/system/providers/docker/__init__.py rename to providers/tests/system/google/cloud/cloud_run/__init__.py diff --git a/tests/system/providers/google/cloud/cloud_run/example_cloud_run.py b/providers/tests/system/google/cloud/cloud_run/example_cloud_run.py 
similarity index 98% rename from tests/system/providers/google/cloud/cloud_run/example_cloud_run.py rename to providers/tests/system/google/cloud/cloud_run/example_cloud_run.py index 23370d26876b6..08145e0336ede 100644 --- a/tests/system/providers/google/cloud/cloud_run/example_cloud_run.py +++ b/providers/tests/system/google/cloud/cloud_run/example_cloud_run.py @@ -367,13 +367,13 @@ def _create_job_instance_with_label(): >> (delete_job1, delete_job2, delete_job3) ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/cloud_run/example_cloud_run_service.py b/providers/tests/system/google/cloud/cloud_run/example_cloud_run_service.py similarity index 95% rename from tests/system/providers/google/cloud/cloud_run/example_cloud_run_service.py rename to providers/tests/system/google/cloud/cloud_run/example_cloud_run_service.py index 0c838e6d4e28e..43318c4b14306 100644 --- a/tests/system/providers/google/cloud/cloud_run/example_cloud_run_service.py +++ b/providers/tests/system/google/cloud/cloud_run/example_cloud_run_service.py @@ -84,14 +84,14 @@ def _create_service(): >> delete_cloud_run_service ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/elasticsearch/__init__.py b/providers/tests/system/google/cloud/cloud_sql/__init__.py similarity index 100% rename from tests/system/providers/elasticsearch/__init__.py rename to providers/tests/system/google/cloud/cloud_sql/__init__.py diff --git a/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py b/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py similarity index 98% rename from tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py rename to providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py index 7a60e3db61718..52414b2784b27 100644 --- a/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql.py +++ b/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py @@ -47,7 +47,8 @@ GCSObjectCreateAclEntryOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -293,14 +294,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from 
dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query.py b/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query.py similarity index 98% rename from tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query.py rename to providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query.py index f6d588c3156c8..6cf7e0c08751c 100644 --- a/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query.py +++ b/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query.py @@ -43,7 +43,8 @@ ) from airflow.settings import Session from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -540,13 +541,13 @@ def delete_connection(connection_id: str) -> None: # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py b/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py similarity index 98% rename from tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py rename to providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py index 4808141512ec5..db77f18339af0 100644 --- a/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py +++ b/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py @@ -48,7 +48,8 @@ ) from airflow.settings import Session from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -490,13 +491,13 @@ def delete_secret(ssl_secret_id, db_type: str) -> None: # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> 
watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/ftp/__init__.py b/providers/tests/system/google/cloud/composer/__init__.py similarity index 100% rename from tests/system/providers/ftp/__init__.py rename to providers/tests/system/google/cloud/composer/__init__.py diff --git a/tests/system/providers/google/cloud/composer/example_cloud_composer.py b/providers/tests/system/google/cloud/composer/example_cloud_composer.py similarity index 98% rename from tests/system/providers/google/cloud/composer/example_cloud_composer.py rename to providers/tests/system/google/cloud/composer/example_cloud_composer.py index 75ee8e90d3aa5..266a7e4a444ad 100644 --- a/tests/system/providers/google/cloud/composer/example_cloud_composer.py +++ b/providers/tests/system/google/cloud/composer/example_cloud_composer.py @@ -214,14 +214,14 @@ [delete_env, defer_delete_env], ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/github/__init__.py b/providers/tests/system/google/cloud/compute/__init__.py similarity index 100% rename from tests/system/providers/github/__init__.py rename to providers/tests/system/google/cloud/compute/__init__.py diff --git a/tests/system/providers/google/cloud/compute/example_compute.py b/providers/tests/system/google/cloud/compute/example_compute.py similarity index 97% rename from tests/system/providers/google/cloud/compute/example_compute.py rename to providers/tests/system/google/cloud/compute/example_compute.py index 27e8ae758e70b..8343920890708 100644 --- a/tests/system/providers/google/cloud/compute/example_compute.py +++ b/providers/tests/system/google/cloud/compute/example_compute.py @@ -39,7 +39,8 @@ ComputeEngineStopInstanceOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [START howto_operator_gce_args_common] ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") @@ -266,14 +267,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/compute/example_compute_igm.py b/providers/tests/system/google/cloud/compute/example_compute_igm.py similarity index 97% 
rename from tests/system/providers/google/cloud/compute/example_compute_igm.py rename to providers/tests/system/google/cloud/compute/example_compute_igm.py index 74072b2099446..11357452dfc69 100644 --- a/tests/system/providers/google/cloud/compute/example_compute_igm.py +++ b/providers/tests/system/google/cloud/compute/example_compute_igm.py @@ -38,7 +38,8 @@ ComputeEngineInstanceGroupUpdateManagerTemplateOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -235,14 +236,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/compute/example_compute_ssh.py b/providers/tests/system/google/cloud/compute/example_compute_ssh.py similarity index 96% rename from tests/system/providers/google/cloud/compute/example_compute_ssh.py rename to providers/tests/system/google/cloud/compute/example_compute_ssh.py index e0bece195762f..5b7d1523017ff 100644 --- a/tests/system/providers/google/cloud/compute/example_compute_ssh.py +++ b/providers/tests/system/google/cloud/compute/example_compute_ssh.py @@ -34,7 +34,8 @@ ) from airflow.providers.ssh.operators.ssh import SSHOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [START howto_operator_gce_args_common] ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") @@ -137,14 +138,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/compute/example_compute_ssh_os_login.py b/providers/tests/system/google/cloud/compute/example_compute_ssh_os_login.py similarity index 96% rename from tests/system/providers/google/cloud/compute/example_compute_ssh_os_login.py rename to providers/tests/system/google/cloud/compute/example_compute_ssh_os_login.py index 99e04fa3d3eb3..63ed278a08c5a 100644 --- a/tests/system/providers/google/cloud/compute/example_compute_ssh_os_login.py +++ b/providers/tests/system/google/cloud/compute/example_compute_ssh_os_login.py @@ -34,7 +34,8 @@ 
 )
 from airflow.providers.ssh.operators.ssh import SSHOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 # [START howto_operator_gce_args_common]
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
@@ -145,14 +146,14 @@
     # ### Everything below this line is not part of example ###
     # ### Just for system tests purpose ###

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/compute/example_compute_ssh_parallel.py b/providers/tests/system/google/cloud/compute/example_compute_ssh_parallel.py
similarity index 96%
rename from tests/system/providers/google/cloud/compute/example_compute_ssh_parallel.py
rename to providers/tests/system/google/cloud/compute/example_compute_ssh_parallel.py
index b5964eed7dd5e..3ab61afc0c259 100644
--- a/tests/system/providers/google/cloud/compute/example_compute_ssh_parallel.py
+++ b/providers/tests/system/google/cloud/compute/example_compute_ssh_parallel.py
@@ -34,7 +34,8 @@
 )
 from airflow.providers.ssh.operators.ssh import SSHOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 # [START howto_operator_gce_args_common]
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
@@ -138,14 +139,14 @@
     # ### Everything below this line is not part of example ###
     # ### Just for system tests purpose ###

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/ads/__init__.py b/providers/tests/system/google/cloud/data_loss_prevention/__init__.py
similarity index 100%
rename from tests/system/providers/google/ads/__init__.py
rename to providers/tests/system/google/cloud/data_loss_prevention/__init__.py
diff --git a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py
similarity index 96%
rename from tests/system/providers/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py
rename to providers/tests/system/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py
index 7acae202dea86..75b1ad93dd479 100644
--- a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py
+++ b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py
@@ -38,7 +38,8 @@
     CloudDLPUpdateDeidentifyTemplateOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 DAG_ID = "dlp_deidentify_content"
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
@@ -156,14 +157,14 @@
         >> delete_template
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_info_types.py b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_info_types.py
similarity index 96%
rename from tests/system/providers/google/cloud/data_loss_prevention/example_dlp_info_types.py
rename to providers/tests/system/google/cloud/data_loss_prevention/example_dlp_info_types.py
index 40e5c030d981b..fb2198b16793f 100644
--- a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_info_types.py
+++ b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_info_types.py
@@ -41,7 +41,8 @@
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 DAG_ID = "dlp_info_types"
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
@@ -153,14 +154,14 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_inspect_template.py b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_inspect_template.py
similarity index 95%
rename from tests/system/providers/google/cloud/data_loss_prevention/example_dlp_inspect_template.py
rename to providers/tests/system/google/cloud/data_loss_prevention/example_dlp_inspect_template.py
index fb9ca15e2d722..22b259582f4ac 100644
--- a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_inspect_template.py
+++ b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_inspect_template.py
@@ -37,7 +37,8 @@
     CloudDLPUpdateInspectTemplateOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 DAG_ID = "dlp_inspect_template"
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
@@ -114,14 +115,14 @@
         >> delete_template
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_job.py b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job.py
similarity index 94%
rename from tests/system/providers/google/cloud/data_loss_prevention/example_dlp_job.py
rename to providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job.py
index 2bea0ce57bcea..b7dc2ac1a4053 100644
--- a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_job.py
+++ b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job.py
@@ -36,7 +36,8 @@
     CloudDLPListDLPJobsOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 DAG_ID = "dlp_job"
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
@@ -89,14 +90,14 @@
     (create_job >> list_jobs >> get_job >> cancel_job >> delete_job)

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_job_trigger.py b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job_trigger.py
similarity index 94%
rename from tests/system/providers/google/cloud/data_loss_prevention/example_dlp_job_trigger.py
rename to providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job_trigger.py
index c2a1908e87f77..902875f7b5921 100644
--- a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_job_trigger.py
+++ b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job_trigger.py
@@ -34,7 +34,8 @@
     CloudDLPUpdateJobTriggerOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 DAG_ID = "dlp_job_trigger"
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
@@ -94,14 +95,14 @@
     (create_trigger >> list_triggers >> get_trigger >> update_trigger >> delete_trigger)

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/__init__.py b/providers/tests/system/google/cloud/data_loss_prevention/resources/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/__init__.py
rename to providers/tests/system/google/cloud/data_loss_prevention/resources/__init__.py
diff --git a/tests/system/providers/google/cloud/data_loss_prevention/resources/dictionary.txt b/providers/tests/system/google/cloud/data_loss_prevention/resources/dictionary.txt
similarity index 100%
rename from tests/system/providers/google/cloud/data_loss_prevention/resources/dictionary.txt
rename to providers/tests/system/google/cloud/data_loss_prevention/resources/dictionary.txt
diff --git a/tests/system/providers/google/cloud/automl/__init__.py b/providers/tests/system/google/cloud/dataflow/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/automl/__init__.py
rename to providers/tests/system/google/cloud/dataflow/__init__.py
diff --git a/tests/system/providers/google/cloud/dataflow/example_dataflow_go.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_go.py
similarity index 98%
rename from tests/system/providers/google/cloud/dataflow/example_dataflow_go.py
rename to providers/tests/system/google/cloud/dataflow/example_dataflow_go.py
index 14c23a015d2de..57e5941e3ed6b 100644
--- a/tests/system/providers/google/cloud/dataflow/example_dataflow_go.py
+++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_go.py
@@ -149,7 +149,7 @@ def check_autoscaling_event(autoscaling_events: list[dict]) -> bool:
     )


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataflow/example_dataflow_native_java.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_native_java.py
similarity index 97%
rename from tests/system/providers/google/cloud/dataflow/example_dataflow_native_java.py
rename to providers/tests/system/google/cloud/dataflow/example_dataflow_native_java.py
index 34dad6cdeb293..3629194e6ee9b 100644
--- a/tests/system/providers/google/cloud/dataflow/example_dataflow_native_java.py
+++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_native_java.py
@@ -148,14 +148,14 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataflow/example_dataflow_native_python.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_native_python.py
similarity index 96%
rename from tests/system/providers/google/cloud/dataflow/example_dataflow_native_python.py
rename to providers/tests/system/google/cloud/dataflow/example_dataflow_native_python.py
index 181c5cbbe0106..229373ed1bf3f 100644
--- a/tests/system/providers/google/cloud/dataflow/example_dataflow_native_python.py
+++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_native_python.py
@@ -110,14 +110,14 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataflow/example_dataflow_native_python_async.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py
similarity index 98%
rename from tests/system/providers/google/cloud/dataflow/example_dataflow_native_python_async.py
rename to providers/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py
index f93378c6db0b0..31f1cd026f19a 100644
--- a/tests/system/providers/google/cloud/dataflow/example_dataflow_native_python_async.py
+++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py
@@ -176,14 +176,14 @@ def check_autoscaling_event(autoscaling_events: list[dict]) -> bool:
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataflow/example_dataflow_pipeline.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_pipeline.py
similarity index 97%
rename from tests/system/providers/google/cloud/dataflow/example_dataflow_pipeline.py
rename to providers/tests/system/google/cloud/dataflow/example_dataflow_pipeline.py
index 8f41b3ed7fb2a..cfd5e06b029b6 100644
--- a/tests/system/providers/google/cloud/dataflow/example_dataflow_pipeline.py
+++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_pipeline.py
@@ -137,14 +137,14 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataflow/example_dataflow_sensors_deferrable.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py
similarity index 98%
rename from tests/system/providers/google/cloud/dataflow/example_dataflow_sensors_deferrable.py
rename to providers/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py
index 4225b6c9ff80e..1a6d0aec3923b 100644
--- a/tests/system/providers/google/cloud/dataflow/example_dataflow_sensors_deferrable.py
+++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py
@@ -177,14 +177,14 @@ def check_autoscaling_event(autoscaling_events: list[dict]) -> bool:
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataflow/example_dataflow_sql.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_sql.py
similarity index 95%
rename from tests/system/providers/google/cloud/dataflow/example_dataflow_sql.py
rename to providers/tests/system/google/cloud/dataflow/example_dataflow_sql.py
index 5b016fec3ec32..e3b0ee711a921 100644
--- a/tests/system/providers/google/cloud/dataflow/example_dataflow_sql.py
+++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_sql.py
@@ -34,7 +34,8 @@
 )
 from airflow.providers.google.cloud.operators.dataflow import DataflowStartSqlJobOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
@@ -136,13 +137,13 @@
         >> delete_bq_dataset
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataflow/example_dataflow_streaming_python.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_streaming_python.py
similarity index 96%
rename from tests/system/providers/google/cloud/dataflow/example_dataflow_streaming_python.py
rename to providers/tests/system/google/cloud/dataflow/example_dataflow_streaming_python.py
index a858ad90aa939..a15dffb3a3282 100644
--- a/tests/system/providers/google/cloud/dataflow/example_dataflow_streaming_python.py
+++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_streaming_python.py
@@ -114,14 +114,14 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataflow/example_dataflow_template.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_template.py
similarity index 96%
rename from tests/system/providers/google/cloud/dataflow/example_dataflow_template.py
rename to providers/tests/system/google/cloud/dataflow/example_dataflow_template.py
index 79ddfbef438de..86545514607aa 100644
--- a/tests/system/providers/google/cloud/dataflow/example_dataflow_template.py
+++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_template.py
@@ -37,7 +37,8 @@
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -157,14 +158,14 @@
         delete_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataflow/example_dataflow_yaml.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_yaml.py
similarity index 96%
rename from tests/system/providers/google/cloud/dataflow/example_dataflow_yaml.py
rename to providers/tests/system/google/cloud/dataflow/example_dataflow_yaml.py
index d5162e8adf1b3..2243ad695bdd4 100644
--- a/tests/system/providers/google/cloud/dataflow/example_dataflow_yaml.py
+++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_yaml.py
@@ -39,7 +39,8 @@
 )
 from airflow.providers.google.cloud.operators.dataflow import DataflowStartYamlJobOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
@@ -161,13 +162,13 @@
         >> delete_bq_dataset
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/automl/resources/__init__.py b/providers/tests/system/google/cloud/dataflow/resources/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/automl/resources/__init__.py
rename to providers/tests/system/google/cloud/dataflow/resources/__init__.py
diff --git a/tests/system/providers/google/cloud/dataflow/resources/input.csv b/providers/tests/system/google/cloud/dataflow/resources/input.csv
similarity index 100%
rename from tests/system/providers/google/cloud/dataflow/resources/input.csv
rename to providers/tests/system/google/cloud/dataflow/resources/input.csv
diff --git a/tests/system/providers/google/cloud/dataflow/resources/schema.json b/providers/tests/system/google/cloud/dataflow/resources/schema.json
similarity index 100%
rename from tests/system/providers/google/cloud/dataflow/resources/schema.json
rename to providers/tests/system/google/cloud/dataflow/resources/schema.json
diff --git a/tests/system/providers/google/cloud/dataflow/resources/text.txt b/providers/tests/system/google/cloud/dataflow/resources/text.txt
similarity index 100%
rename from tests/system/providers/google/cloud/dataflow/resources/text.txt
rename to providers/tests/system/google/cloud/dataflow/resources/text.txt
diff --git a/tests/system/providers/google/cloud/dataflow/resources/wordcount.go b/providers/tests/system/google/cloud/dataflow/resources/wordcount.go
similarity index 100%
rename from tests/system/providers/google/cloud/dataflow/resources/wordcount.go
rename to providers/tests/system/google/cloud/dataflow/resources/wordcount.go
diff --git a/tests/system/providers/google/cloud/azure/__init__.py b/providers/tests/system/google/cloud/dataform/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/azure/__init__.py
rename to providers/tests/system/google/cloud/dataform/__init__.py
diff --git a/tests/system/providers/google/cloud/dataform/example_dataform.py b/providers/tests/system/google/cloud/dataform/example_dataform.py
similarity index 98%
rename from tests/system/providers/google/cloud/dataform/example_dataform.py
rename to providers/tests/system/google/cloud/dataform/example_dataform.py
index 3f2ad987f233a..b61247a877008 100644
--- a/tests/system/providers/google/cloud/dataform/example_dataform.py
+++ b/providers/tests/system/google/cloud/dataform/example_dataform.py
@@ -48,7 +48,8 @@
 from airflow.providers.google.cloud.sensors.dataform import DataformWorkflowInvocationStateSensor
 from airflow.providers.google.cloud.utils.dataform import make_initialization_workspace_flow
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -327,13 +328,13 @@
     # ### Everything below this line is not part of example ###
     # ### Just for system tests purpose ###
-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigquery/__init__.py b/providers/tests/system/google/cloud/datafusion/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/bigquery/__init__.py
rename to providers/tests/system/google/cloud/datafusion/__init__.py
diff --git a/tests/system/providers/google/cloud/datafusion/example_datafusion.py b/providers/tests/system/google/cloud/datafusion/example_datafusion.py
similarity index 98%
rename from tests/system/providers/google/cloud/datafusion/example_datafusion.py
rename to providers/tests/system/google/cloud/datafusion/example_datafusion.py
index d206c2260ee24..a61fbbd01da91 100644
--- a/tests/system/providers/google/cloud/datafusion/example_datafusion.py
+++ b/providers/tests/system/google/cloud/datafusion/example_datafusion.py
@@ -41,7 +41,8 @@
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.providers.google.cloud.sensors.datafusion import CloudDataFusionPipelineStateSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 # [START howto_data_fusion_env_variables]
 SERVICE_ACCOUNT = os.environ.get("GCP_DATAFUSION_SERVICE_ACCOUNT")
@@ -339,13 +340,13 @@ def get_artifacts_versions(ti=None):
         >> [delete_bucket1, delete_bucket2]
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigquery/resources/__init__.py b/providers/tests/system/google/cloud/datapipelines/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/bigquery/resources/__init__.py
rename to providers/tests/system/google/cloud/datapipelines/__init__.py
diff --git a/tests/system/providers/google/cloud/datapipelines/example_datapipeline.py b/providers/tests/system/google/cloud/datapipelines/example_datapipeline.py
similarity index 96%
rename from tests/system/providers/google/cloud/datapipelines/example_datapipeline.py
rename to providers/tests/system/google/cloud/datapipelines/example_datapipeline.py
index f13a52a7c6734..91fb7eed26d2a 100644
--- a/tests/system/providers/google/cloud/datapipelines/example_datapipeline.py
+++ b/providers/tests/system/google/cloud/datapipelines/example_datapipeline.py
@@ -37,7 +37,8 @@
     GCSSynchronizeBucketsOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 DAG_ID = "datapipeline"
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
@@ -137,14 +138,14 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/bigtable/__init__.py b/providers/tests/system/google/cloud/dataplex/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/bigtable/__init__.py
rename to providers/tests/system/google/cloud/dataplex/__init__.py
diff --git a/tests/system/providers/google/cloud/dataplex/example_dataplex.py b/providers/tests/system/google/cloud/dataplex/example_dataplex.py
similarity index 97%
rename from tests/system/providers/google/cloud/dataplex/example_dataplex.py
rename to providers/tests/system/google/cloud/dataplex/example_dataplex.py
index a0ac55e07fc95..34e9a0fd05c19 100644
--- a/tests/system/providers/google/cloud/dataplex/example_dataplex.py
+++ b/providers/tests/system/google/cloud/dataplex/example_dataplex.py
@@ -40,7 +40,8 @@
 )
 from airflow.providers.google.cloud.sensors.dataplex import DataplexTaskStateSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -205,14 +206,14 @@
         delete_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py b/providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py
similarity index 98%
rename from tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py
rename to providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py
index 3203a0d0cd3e2..44f4af2435925 100644
--- a/tests/system/providers/google/cloud/dataplex/example_dataplex_dp.py
+++ b/providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py
@@ -50,7 +50,8 @@
 )
 from airflow.providers.google.cloud.sensors.dataplex import DataplexDataProfileJobStatusSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -328,14 +329,14 @@
         [delete_lake, delete_dataset],
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py b/providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py
similarity index 98%
rename from tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
rename to providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py
index 6255f5425f18f..d31d574755c90 100644
--- a/tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
+++ b/providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py
@@ -50,7 +50,8 @@
 )
 from airflow.providers.google.cloud.sensors.dataplex import DataplexDataQualityJobStatusSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -362,14 +363,14 @@
         [delete_lake, delete_dataset],
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/cloud_batch/__init__.py b/providers/tests/system/google/cloud/dataprep/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/cloud_batch/__init__.py
rename to providers/tests/system/google/cloud/dataprep/__init__.py
diff --git a/tests/system/providers/google/cloud/dataprep/example_dataprep.py b/providers/tests/system/google/cloud/dataprep/example_dataprep.py
similarity index 98%
rename from tests/system/providers/google/cloud/dataprep/example_dataprep.py
rename to providers/tests/system/google/cloud/dataprep/example_dataprep.py
index 3573285055192..9f603f43fb1b3 100644
--- a/tests/system/providers/google/cloud/dataprep/example_dataprep.py
+++ b/providers/tests/system/google/cloud/dataprep/example_dataprep.py
@@ -47,7 +47,8 @@
 from airflow.providers.google.cloud.sensors.dataprep import DataprepJobGroupIsFinishedSensor
 from airflow.settings import Session
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "dataprep"
@@ -306,13 +307,13 @@ def delete_connection(connection_id: str) -> None:
         [delete_bucket_task, delete_connection_task],
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/cloud_build/__init__.py b/providers/tests/system/google/cloud/dataproc/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/cloud_build/__init__.py
rename to providers/tests/system/google/cloud/dataproc/__init__.py
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_batch.py
similarity index 96%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_batch.py
index 6771a529d79eb..54481f43ca762 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_batch.py
@@ -35,7 +35,8 @@
 )
 from airflow.providers.google.cloud.sensors.dataproc import DataprocBatchSensor
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -175,13 +176,13 @@
         >> delete_batch_4
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_deferrable.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_batch_deferrable.py
similarity index 93%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_batch_deferrable.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_batch_deferrable.py
index 557b8ce49060f..79d3766995e3d 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_deferrable.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_batch_deferrable.py
@@ -34,7 +34,8 @@
     DataprocGetBatchOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "dataproc_batch_deferrable"
@@ -90,14 +91,14 @@
         >> delete_batch
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py
similarity index 96%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py
index fb592b0cdb6fe..a8ed6b13de552 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py
@@ -35,7 +35,8 @@
 )
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "dataproc_batch_ps"
@@ -140,14 +141,14 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py
similarity index 95%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py
index ef57500639dc5..0eb27762c2f98 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py
@@ -34,7 +34,8 @@
     DataprocStopClusterOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 DAG_ID = "dataproc_create_existing_stopped_cluster"

@@ -120,13 +121,13 @@
         >> delete_cluster
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_deferrable.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py
similarity index 95%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_deferrable.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py
index 3ff91d95a95be..8ae262dbf624c 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_deferrable.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py
@@ -33,7 +33,8 @@
     DataprocUpdateClusterOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "dataproc_cluster_def"
@@ -136,14 +137,14 @@
         >> delete_cluster
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_diagnose.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_diagnose.py
similarity index 95%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_diagnose.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_diagnose.py
index 1d94f688996f8..3eacc905bc703 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_diagnose.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_diagnose.py
@@ -33,7 +33,8 @@
     DataprocDiagnoseClusterOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "dataproc_diagnose_cluster"
@@ -114,14 +115,14 @@
         >> delete_cluster
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_generator.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_generator.py
similarity index 95%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_generator.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_generator.py
index 19e2300e28ead..f990363c48004 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_generator.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_generator.py
@@ -39,7 +39,8 @@
     GCSSynchronizeBucketsOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "dataproc_cluster_generation"
@@ -134,14 +135,14 @@
         >> [delete_cluster, delete_bucket]
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_start_stop.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_start_stop.py
similarity index 95%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_start_stop.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_start_stop.py
index 7759ddd098f0e..2e4b698573bf2 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_start_stop.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_start_stop.py
@@ -34,7 +34,8 @@
     DataprocStopClusterOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 DAG_ID = "dataproc_cluster_start_stop"

@@ -110,13 +111,13 @@
         >> delete_cluster
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_update.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_update.py
similarity index 95%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_update.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_update.py
index ea7725209f808..7ed0061d9947c 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_update.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_update.py
@@ -33,7 +33,8 @@
     DataprocUpdateClusterOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "dataproc_update"
@@ -119,14 +120,14 @@
         >> delete_cluster
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_flink.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_flink.py
similarity index 95%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_flink.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_flink.py
index 2eb6d4c4bdf4f..ce1a6fc4451e9 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_flink.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_flink.py
@@ -34,7 +34,8 @@
 )
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "dataproc_flink"
@@ -123,14 +124,14 @@
         >> [delete_cluster, delete_bucket]
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_gke.py
similarity index 96%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_gke.py
index becf273784ab1..bb1044da62414 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_gke.py
@@ -43,7 +43,8 @@
     GKEDeleteClusterOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "dataproc_gke"
@@ -143,13 +144,13 @@
         >> delete_gke_cluster
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_hadoop.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_hadoop.py
similarity index 95%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_hadoop.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_hadoop.py
index 23ddd6eb581b7..c1f5423041fa4 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_hadoop.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_hadoop.py
@@ -34,7 +34,8 @@
 )
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "dataproc_hadoop"
@@ -122,14 +123,14 @@
         >> [delete_cluster, delete_bucket]
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_hive.py
similarity index 95%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_hive.py
index 193d790a5282f..6aadceb552f61 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_hive.py
@@ -33,7 +33,8 @@
     DataprocSubmitJobOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "dataproc_hive"
@@ -123,14 +124,14 @@
         >> delete_cluster
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_pig.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_pig.py
similarity index 94%
rename from tests/system/providers/google/cloud/dataproc/example_dataproc_pig.py
rename to providers/tests/system/google/cloud/dataproc/example_dataproc_pig.py
index 344adaeca2e66..762ca2bc73fd2 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_pig.py
+++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_pig.py
@@ -33,7 +33,8 @@
     DataprocSubmitJobOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 DAG_ID = "dataproc_pig"
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
@@ -107,14 +108,14 @@
         >> delete_cluster
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_presto.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_presto.py similarity index 94% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_presto.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_presto.py index 224dfc3db5b1f..f5bdf25732a5e 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_presto.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_presto.py @@ -33,7 +33,8 @@ DataprocSubmitJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_presto" @@ -114,14 +115,14 @@ >> delete_cluster ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_pyspark.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_pyspark.py similarity index 95% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_pyspark.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_pyspark.py index 44809b283767d..71e7245b7d728 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_pyspark.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_pyspark.py @@ -38,7 +38,8 @@ GCSSynchronizeBucketsOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_pyspark" @@ -139,14 +140,14 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark.py similarity index 94% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_spark.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_spark.py index 
ca76810cb43e6..e43a56d230bf4 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark.py @@ -33,7 +33,8 @@ DataprocSubmitJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_spark" @@ -110,14 +111,14 @@ >> delete_cluster ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_async.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_async.py similarity index 95% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_spark_async.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_spark_async.py index 2ba8cc512f73c..475e9912e8562 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_async.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_async.py @@ -34,7 +34,8 @@ ) from airflow.providers.google.cloud.sensors.dataproc import DataprocJobSensor from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_spark_async" @@ -120,14 +121,14 @@ >> delete_cluster ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_deferrable.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_deferrable.py similarity index 94% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_spark_deferrable.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_spark_deferrable.py index cee74bb819b9d..5e6d0b773af6f 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_deferrable.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_deferrable.py @@ -34,7 +34,8 @@ DataprocSubmitJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = 
"dataproc_spark_deferrable" @@ -111,14 +112,14 @@ >> delete_cluster ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_sql.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_sql.py similarity index 94% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_spark_sql.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_spark_sql.py index dc446ad332e12..aef860a0e545a 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_sql.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_sql.py @@ -33,7 +33,8 @@ DataprocSubmitJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_spark_sql" @@ -107,14 +108,14 @@ >> delete_cluster ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_sparkr.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_sparkr.py similarity index 95% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_sparkr.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_sparkr.py index 1468edf05eceb..7b416ade38648 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_sparkr.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_sparkr.py @@ -38,7 +38,8 @@ GCSSynchronizeBucketsOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_sparkr" @@ -135,14 +136,14 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git 
a/tests/system/providers/google/cloud/dataproc/example_dataproc_trino.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_trino.py similarity index 94% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_trino.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_trino.py index 4631733cd0b4b..d3f7f2a1a3a4c 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_trino.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_trino.py @@ -33,7 +33,8 @@ DataprocSubmitJobOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_trino" @@ -116,14 +117,14 @@ >> delete_cluster ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_workflow.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_workflow.py similarity index 95% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_workflow.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_workflow.py index 334826b56523a..ab465a124d5fd 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_workflow.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_workflow.py @@ -29,7 +29,8 @@ DataprocInstantiateInlineWorkflowTemplateOperator, DataprocInstantiateWorkflowTemplateOperator, ) -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") DAG_ID = "dataproc_workflow" @@ -103,14 +104,14 @@ >> instantiate_inline_workflow_template ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc/example_dataproc_workflow_deferrable.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_workflow_deferrable.py similarity index 95% rename from tests/system/providers/google/cloud/dataproc/example_dataproc_workflow_deferrable.py rename to providers/tests/system/google/cloud/dataproc/example_dataproc_workflow_deferrable.py index ec765250a3326..e2319a35a124f 100644 --- a/tests/system/providers/google/cloud/dataproc/example_dataproc_workflow_deferrable.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_workflow_deferrable.py @@ 
@@ -29,7 +29,8 @@
     DataprocInstantiateInlineWorkflowTemplateOperator,
     DataprocInstantiateWorkflowTemplateOperator,
 )
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "dataproc_workflow_def"
@@ -107,14 +108,14 @@
         >> instantiate_inline_workflow_template_async
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/cloud_build/resources/__init__.py b/providers/tests/system/google/cloud/dataproc_metastore/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/cloud_build/resources/__init__.py
rename to providers/tests/system/google/cloud/dataproc_metastore/__init__.py
diff --git a/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py b/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py
similarity index 97%
rename from tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py
rename to providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py
index 0d0c41f52fd9b..ee3b5b70ad519 100644
--- a/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py
+++ b/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py
@@ -42,7 +42,8 @@
     GCSSynchronizeBucketsOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 DAG_ID = "dataproc_metastore"
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -191,14 +192,14 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py b/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py
similarity index 95%
rename from tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py
rename to providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py
index 6a5a7566b3429..12af45d210653 100644
--- a/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py
+++ b/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py
@@ -35,7 +35,8 @@
     DataprocMetastoreRestoreServiceOperator,
 )
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 DAG_ID = "dataproc_metastore_backup"

@@ -125,14 +126,14 @@
     )
     (create_service >> backup_service >> list_backups >> restore_service >> delete_backup >> delete_service)

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py b/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py
similarity index 97%
rename from tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py
rename to providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py
index 07677e11cbf36..6b06b868c73e2 100644
--- a/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py
+++ b/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py
@@ -42,7 +42,8 @@
 from airflow.providers.google.cloud.sensors.dataproc_metastore import MetastoreHivePartitionSensor
 from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 DAG_ID = "hive_partition_sensor"
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -231,14 +232,14 @@ def get_hive_warehouse_bucket(**kwargs):
         >> [delete_dataproc_cluster, delete_metastore_service, delete_warehouse_bucket]
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/cloud_functions/__init__.py b/providers/tests/system/google/cloud/datastore/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/cloud_functions/__init__.py
rename to providers/tests/system/google/cloud/datastore/__init__.py
diff --git a/tests/system/providers/google/cloud/datastore/example_datastore_commit.py b/providers/tests/system/google/cloud/datastore/example_datastore_commit.py
similarity index 96%
rename from tests/system/providers/google/cloud/datastore/example_datastore_commit.py
rename to providers/tests/system/google/cloud/datastore/example_datastore_commit.py
index fc7eabfb183a2..430d176ed733c 100644
--- a/tests/system/providers/google/cloud/datastore/example_datastore_commit.py
+++ b/providers/tests/system/google/cloud/datastore/example_datastore_commit.py
@@ -38,7 +38,8 @@
 )
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -159,14 +160,14 @@
         [delete_bucket, delete_export_operation, delete_import_operation],
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/datastore/example_datastore_query.py b/providers/tests/system/google/cloud/datastore/example_datastore_query.py
similarity index 93%
rename from tests/system/providers/google/cloud/datastore/example_datastore_query.py
rename to providers/tests/system/google/cloud/datastore/example_datastore_query.py
index 24bb1b85dd983..0b282a1feb34e 100644
--- a/tests/system/providers/google/cloud/datastore/example_datastore_query.py
+++ b/providers/tests/system/google/cloud/datastore/example_datastore_query.py
@@ -31,7 +31,8 @@
     CloudDatastoreBeginTransactionOperator,
     CloudDatastoreRunQueryOperator,
 )
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -79,14 +80,14 @@
     allocate_ids >> begin_transaction_query >> run_query


-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/datastore/example_datastore_rollback.py b/providers/tests/system/google/cloud/datastore/example_datastore_rollback.py
similarity index 91%
rename from tests/system/providers/google/cloud/datastore/example_datastore_rollback.py
rename to providers/tests/system/google/cloud/datastore/example_datastore_rollback.py
index 09661b0ae0c1e..44d0dc1709418 100644
--- a/tests/system/providers/google/cloud/datastore/example_datastore_rollback.py
+++ b/providers/tests/system/google/cloud/datastore/example_datastore_rollback.py
@@ -30,7 +30,8 @@
     CloudDatastoreBeginTransactionOperator,
     CloudDatastoreRollbackOperator,
 )
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -62,14 +63,14 @@
     begin_transaction_to_rollback >> rollback_transaction


-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/cloud_memorystore/__init__.py b/providers/tests/system/google/cloud/gcs/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/cloud_memorystore/__init__.py
rename to providers/tests/system/google/cloud/gcs/__init__.py
diff --git a/tests/system/providers/google/cloud/gcs/example_calendar_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_calendar_to_gcs.py
similarity index 96%
rename from tests/system/providers/google/cloud/gcs/example_calendar_to_gcs.py
rename to providers/tests/system/google/cloud/gcs/example_calendar_to_gcs.py
index 52023060c7d6d..f44e7315e2d18 100644
--- a/tests/system/providers/google/cloud/gcs/example_calendar_to_gcs.py
+++ b/providers/tests/system/google/cloud/gcs/example_calendar_to_gcs.py
@@ -115,13 +115,13 @@ def delete_connection(connection_id: str) -> None:
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_firestore.py b/providers/tests/system/google/cloud/gcs/example_firestore.py
similarity index 97%
rename from tests/system/providers/google/cloud/gcs/example_firestore.py
rename to providers/tests/system/google/cloud/gcs/example_firestore.py
index cd829f0ca79c5..89e0e3fe1c6d9 100644
--- a/tests/system/providers/google/cloud/gcs/example_firestore.py
+++ b/providers/tests/system/google/cloud/gcs/example_firestore.py
@@ -170,14 +170,14 @@
         >> [delete_dataset, delete_bucket]
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_acl.py b/providers/tests/system/google/cloud/gcs/example_gcs_acl.py
similarity index 94%
rename from tests/system/providers/google/cloud/gcs/example_gcs_acl.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_acl.py
index 0b550b22ba07c..7843c0877c1d9 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_acl.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_acl.py
@@ -33,7 +33,8 @@
 )
 from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -107,14 +108,14 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_copy_delete.py b/providers/tests/system/google/cloud/gcs/example_gcs_copy_delete.py
similarity index 95%
rename from tests/system/providers/google/cloud/gcs/example_gcs_copy_delete.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_copy_delete.py
index aebb1e3e7ed85..6bbec540df9f2 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_copy_delete.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_copy_delete.py
@@ -36,7 +36,8 @@
 from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator
 from airflow.providers.standard.operators.bash import BashOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -121,14 +122,14 @@
         [delete_bucket_src, delete_bucket_dst],
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_sensor.py b/providers/tests/system/google/cloud/gcs/example_gcs_sensor.py
similarity index 97%
rename from tests/system/providers/google/cloud/gcs/example_gcs_sensor.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_sensor.py
index 5dc42604ddca0..2d4da2887d4e5 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_sensor.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_sensor.py
@@ -36,7 +36,8 @@
 )
 from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -200,14 +201,14 @@ def mode_setter(self, value):
         delete_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery.py b/providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery.py
similarity index 93%
rename from tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery.py
index 94286ade4369c..5d7e393bff29e 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery.py
@@ -31,7 +31,8 @@
 )
 from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "gcs_to_bigquery_operator"
@@ -81,13 +82,13 @@
         >> delete_test_dataset
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery_async.py b/providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery_async.py
similarity index 96%
rename from tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery_async.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery_async.py
index 27bef2486fd56..1ca531b818008 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_to_bigquery_async.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery_async.py
@@ -31,7 +31,8 @@
 )
 from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -173,13 +174,13 @@
         >> delete_test_dataset_delimiter
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py
similarity index 97%
rename from tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py
index 55bec85a50562..ac1d70307a535 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py
@@ -40,7 +40,8 @@
 from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
 from airflow.providers.standard.operators.bash import BashOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -274,14 +275,14 @@ def delete_work_dir(create_workdir_result: str) -> None:
         [delete_bucket_src, delete_bucket_dst, delete_work_dir(create_workdir_task)],
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_to_gdrive.py b/providers/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py
similarity index 97%
rename from tests/system/providers/google/cloud/gcs/example_gcs_to_gdrive.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py
index e121e63bac971..bfb200a15a63d 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_to_gdrive.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py
@@ -39,7 +39,8 @@
 from airflow.providers.google.suite.transfers.gcs_to_gdrive import GCSToGoogleDriveOperator
 from airflow.settings import Session
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -203,13 +204,13 @@ def delete_connection(connection_id: str) -> None:
         >> [delete_bucket, delete_connection_task]
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_to_sheets.py b/providers/tests/system/google/cloud/gcs/example_gcs_to_sheets.py
similarity index 97%
rename from tests/system/providers/google/cloud/gcs/example_gcs_to_sheets.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_to_sheets.py
index a25c499ec5071..69947698cd7bb 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_to_sheets.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_to_sheets.py
@@ -131,13 +131,13 @@ def delete_connection(connection_id: str) -> None:
         >> [delete_bucket, delete_connection_task]
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_transform.py b/providers/tests/system/google/cloud/gcs/example_gcs_transform.py
similarity index 93%
rename from tests/system/providers/google/cloud/gcs/example_gcs_transform.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_transform.py
index 018076173d28f..0b59119c6bc1e 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_transform.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_transform.py
@@ -33,7 +33,8 @@
 )
 from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -94,14 +95,14 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_transform_timespan.py b/providers/tests/system/google/cloud/gcs/example_gcs_transform_timespan.py
similarity index 95%
rename from tests/system/providers/google/cloud/gcs/example_gcs_transform_timespan.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_transform_timespan.py
index eaa3d5e5c0eda..3a5b03695ee40 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_transform_timespan.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_transform_timespan.py
@@ -34,7 +34,8 @@
 )
 from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -113,14 +114,14 @@
         [delete_bucket_src, delete_bucket_dst],
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_upload_download.py b/providers/tests/system/google/cloud/gcs/example_gcs_upload_download.py
similarity index 94%
rename from tests/system/providers/google/cloud/gcs/example_gcs_upload_download.py
rename to providers/tests/system/google/cloud/gcs/example_gcs_upload_download.py
index ae34aa639f957..6907a29fb5389 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_upload_download.py
+++ b/providers/tests/system/google/cloud/gcs/example_gcs_upload_download.py
@@ -30,7 +30,8 @@
 from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator
 from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -91,14 +92,14 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_gdrive_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_gdrive_to_gcs.py
similarity index 96%
rename from tests/system/providers/google/cloud/gcs/example_gdrive_to_gcs.py
rename to providers/tests/system/google/cloud/gcs/example_gdrive_to_gcs.py
index a5842ac17a530..6ee188bd957ec 100644
--- a/tests/system/providers/google/cloud/gcs/example_gdrive_to_gcs.py
+++ b/providers/tests/system/google/cloud/gcs/example_gdrive_to_gcs.py
@@ -33,7 +33,8 @@
 from airflow.providers.google.suite.transfers.gcs_to_gdrive import GCSToGoogleDriveOperator
 from airflow.settings import Session
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -164,13 +165,13 @@ def delete_connection(connection_id: str) -> None:
         >> [delete_bucket, delete_connection_task]
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_mssql_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_mssql_to_gcs.py
similarity index 93%
rename from tests/system/providers/google/cloud/gcs/example_mssql_to_gcs.py
rename to providers/tests/system/google/cloud/gcs/example_mssql_to_gcs.py
index 995361813c7ed..bc6ac79f1e0b1 100644
--- a/tests/system/providers/google/cloud/gcs/example_mssql_to_gcs.py
+++ b/providers/tests/system/google/cloud/gcs/example_mssql_to_gcs.py
@@ -23,7 +23,8 @@

 from airflow.models.dag import DAG
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 try:
     from airflow.providers.google.cloud.transfers.mssql_to_gcs import MSSQLToGCSOperator
@@ -77,14 +78,14 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_mysql_to_gcs.py
similarity index 98%
rename from tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py
rename to providers/tests/system/google/cloud/gcs/example_mysql_to_gcs.py
index a673ab88f722e..cc53337e56934 100644
--- a/tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py
+++ b/providers/tests/system/google/cloud/gcs/example_mysql_to_gcs.py
@@ -295,13 +295,13 @@ def delete_connection(connection_id: str) -> None:
     mysql_to_gcs >> [delete_gcs_bucket, delete_firewall_rule, delete_gce_instance, delete_connection_task]
     delete_gce_instance >> delete_persistent_disk

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_oracle_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_oracle_to_gcs.py
similarity index 92%
rename from tests/system/providers/google/cloud/gcs/example_oracle_to_gcs.py
rename to providers/tests/system/google/cloud/gcs/example_oracle_to_gcs.py
index 8e2b6fb979ef8..c727eef1f66fb 100644
--- a/tests/system/providers/google/cloud/gcs/example_oracle_to_gcs.py
+++ b/providers/tests/system/google/cloud/gcs/example_oracle_to_gcs.py
@@ -23,7 +23,8 @@
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.providers.google.cloud.transfers.oracle_to_gcs import OracleToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -63,13 +64,13 @@
         >> delete_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_s3_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_s3_to_gcs.py
similarity index 95%
rename from tests/system/providers/google/cloud/gcs/example_s3_to_gcs.py
rename to providers/tests/system/google/cloud/gcs/example_s3_to_gcs.py
index 8b53e8e531156..487dbbce552e2 100644
--- a/tests/system/providers/google/cloud/gcs/example_s3_to_gcs.py
+++ b/providers/tests/system/google/cloud/gcs/example_s3_to_gcs.py
@@ -27,7 +27,8 @@
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.providers.google.cloud.transfers.s3_to_gcs import S3ToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 GCP_PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -109,14 +110,14 @@ def upload_file():
         >> delete_gcs_bucket
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_sftp_to_gcs.py
similarity index 95%
rename from tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py
rename to providers/tests/system/google/cloud/gcs/example_sftp_to_gcs.py
index 2860d8552e101..994e7e8242e1e 100644
--- a/tests/system/providers/google/cloud/gcs/example_sftp_to_gcs.py
+++ b/providers/tests/system/google/cloud/gcs/example_sftp_to_gcs.py
@@ -31,7 +31,8 @@
 from airflow.providers.google.cloud.transfers.sftp_to_gcs import SFTPToGCSOperator
 from airflow.providers.standard.operators.bash import BashOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
@@ -118,14 +119,14 @@
         delete_bucket,
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_sheets.py b/providers/tests/system/google/cloud/gcs/example_sheets.py
similarity index 97%
rename from tests/system/providers/google/cloud/gcs/example_sheets.py
rename to providers/tests/system/google/cloud/gcs/example_sheets.py
index 2247819494f91..7d17379819168 100644
--- a/tests/system/providers/google/cloud/gcs/example_sheets.py
+++ b/providers/tests/system/google/cloud/gcs/example_sheets.py
@@ -145,13 +145,13 @@ def delete_connection(connection_id: str) -> None:
         >> [delete_bucket, delete_connection_task]
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_sheets_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_sheets_to_gcs.py
similarity index 97%
rename from tests/system/providers/google/cloud/gcs/example_sheets_to_gcs.py
rename to providers/tests/system/google/cloud/gcs/example_sheets_to_gcs.py
index d4890cbbb1a44..08688c332563a 100644
--- a/tests/system/providers/google/cloud/gcs/example_sheets_to_gcs.py
+++ b/providers/tests/system/google/cloud/gcs/example_sheets_to_gcs.py
@@ -120,13 +120,13 @@ def delete_connection(connection_id: str) -> None:
         >> [delete_bucket, delete_connection_task]
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/gcs/example_trino_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_trino_to_gcs.py
similarity index 98%
rename from tests/system/providers/google/cloud/gcs/example_trino_to_gcs.py
rename to providers/tests/system/google/cloud/gcs/example_trino_to_gcs.py
index f192837437142..af92e550481f7 100644
--- a/tests/system/providers/google/cloud/gcs/example_trino_to_gcs.py
+++ b/providers/tests/system/google/cloud/gcs/example_trino_to_gcs.py
@@ -221,13 +221,13 @@ def safe_name(s: str) -> str:
         >> [delete_dataset, delete_bucket]
     )

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/cloud_run/__init__.py b/providers/tests/system/google/cloud/gcs/resources/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/cloud_run/__init__.py
rename to providers/tests/system/google/cloud/gcs/resources/__init__.py
diff --git a/tests/system/providers/google/cloud/gcs/resources/example_upload.txt b/providers/tests/system/google/cloud/gcs/resources/example_upload.txt
similarity index 100%
rename from tests/system/providers/google/cloud/gcs/resources/example_upload.txt
rename to providers/tests/system/google/cloud/gcs/resources/example_upload.txt
diff --git a/tests/system/providers/google/cloud/gcs/resources/tmp.tar.gz b/providers/tests/system/google/cloud/gcs/resources/tmp.tar.gz
similarity index 100%
rename from tests/system/providers/google/cloud/gcs/resources/tmp.tar.gz
rename to providers/tests/system/google/cloud/gcs/resources/tmp.tar.gz
diff --git a/tests/system/providers/google/cloud/gcs/resources/transform_script.py b/providers/tests/system/google/cloud/gcs/resources/transform_script.py
similarity index 100%
rename from tests/system/providers/google/cloud/gcs/resources/transform_script.py
rename to providers/tests/system/google/cloud/gcs/resources/transform_script.py
diff --git a/tests/system/providers/google/cloud/gcs/resources/transform_timespan.py b/providers/tests/system/google/cloud/gcs/resources/transform_timespan.py
similarity index 100%
rename from tests/system/providers/google/cloud/gcs/resources/transform_timespan.py
rename to providers/tests/system/google/cloud/gcs/resources/transform_timespan.py
diff --git a/tests/system/providers/google/cloud/gcs/resources/us-states.csv b/providers/tests/system/google/cloud/gcs/resources/us-states.csv
similarity index 100%
rename from tests/system/providers/google/cloud/gcs/resources/us-states.csv
rename to providers/tests/system/google/cloud/gcs/resources/us-states.csv
diff --git a/tests/system/providers/google/cloud/cloud_sql/__init__.py b/providers/tests/system/google/cloud/kubernetes_engine/__init__.py
similarity index 100%
rename from tests/system/providers/google/cloud/cloud_sql/__init__.py
rename to providers/tests/system/google/cloud/kubernetes_engine/__init__.py
diff --git a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py
similarity index 95%
rename from tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py
rename to providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py
index 173fddad3a065..e9fe3f6836b94 100644
--- a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py
+++ b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py
@@ -32,7 +32,8 @@
 )
 from airflow.providers.standard.operators.bash import BashOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "kubernetes_engine"
@@ -112,14 +113,14 @@
     create_cluster >> [pod_task, pod_task_xcom] >> delete_cluster
     pod_task_xcom >> pod_task_xcom_result

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py
similarity index 95%
rename from tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py
rename to providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py
index e974a628c7a52..f5cb8f570754f 100644
--- a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py
+++ b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py
@@ -32,7 +32,8 @@
 )
 from airflow.providers.standard.operators.bash import BashOperator
 from airflow.utils.trigger_rule import TriggerRule
-from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+
+from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID

 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 DAG_ID = "kubernetes_engine_async"
@@ -115,14 +116,14 @@
     create_cluster >> [pod_task, pod_task_xcom_async] >> delete_cluster
     pod_task_xcom_async >> pod_task_xcom_result

-    from tests.system.utils.watcher import watcher
+    from dev.tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "teardown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()


-from tests.system.utils import get_test_run  # noqa: E402
+from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py
similarity index 97%
rename from tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py
rename to providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py
index
2c7790c10220c..a4c56c17e5baf 100644 --- a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py +++ b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py @@ -179,14 +179,14 @@ delete_cluster, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py similarity index 97% rename from tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py rename to providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py index 5858256802a34..06c23432a923a 100644 --- a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py +++ b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py @@ -179,14 +179,14 @@ >> delete_cluster ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py similarity index 96% rename from tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py rename to providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py index 44479099d6a72..784ba994862b7 100644 --- a/tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py +++ b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py @@ -101,14 +101,14 @@ create_cluster >> create_resource_task >> delete_resource_task >> delete_cluster - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/composer/__init__.py b/providers/tests/system/google/cloud/life_sciences/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/composer/__init__.py rename to 
providers/tests/system/google/cloud/life_sciences/__init__.py diff --git a/tests/system/providers/google/cloud/life_sciences/example_life_sciences.py b/providers/tests/system/google/cloud/life_sciences/example_life_sciences.py similarity index 95% rename from tests/system/providers/google/cloud/life_sciences/example_life_sciences.py rename to providers/tests/system/google/cloud/life_sciences/example_life_sciences.py index f129dc38ea549..170eeb39ad1aa 100644 --- a/tests/system/providers/google/cloud/life_sciences/example_life_sciences.py +++ b/providers/tests/system/google/cloud/life_sciences/example_life_sciences.py @@ -27,7 +27,8 @@ from airflow.providers.google.cloud.operators.life_sciences import LifeSciencesRunPipelineOperator from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -130,14 +131,14 @@ delete_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/compute/__init__.py b/providers/tests/system/google/cloud/life_sciences/resources/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/compute/__init__.py rename to providers/tests/system/google/cloud/life_sciences/resources/__init__.py diff --git a/tests/system/providers/google/cloud/life_sciences/resources/file b/providers/tests/system/google/cloud/life_sciences/resources/file similarity index 100% rename from tests/system/providers/google/cloud/life_sciences/resources/file rename to providers/tests/system/google/cloud/life_sciences/resources/file diff --git a/tests/system/providers/google/cloud/data_loss_prevention/__init__.py b/providers/tests/system/google/cloud/ml_engine/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/data_loss_prevention/__init__.py rename to providers/tests/system/google/cloud/ml_engine/__init__.py diff --git a/tests/system/providers/google/cloud/ml_engine/example_mlengine.py b/providers/tests/system/google/cloud/ml_engine/example_mlengine.py similarity index 98% rename from tests/system/providers/google/cloud/ml_engine/example_mlengine.py rename to providers/tests/system/google/cloud/ml_engine/example_mlengine.py index bde2c0bbaf9ee..f0eaf7cb30171 100644 --- a/tests/system/providers/google/cloud/ml_engine/example_mlengine.py +++ b/providers/tests/system/google/cloud/ml_engine/example_mlengine.py @@ -280,13 +280,13 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import 
get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/data_loss_prevention/resources/__init__.py b/providers/tests/system/google/cloud/natural_language/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/data_loss_prevention/resources/__init__.py rename to providers/tests/system/google/cloud/natural_language/__init__.py diff --git a/tests/system/providers/google/cloud/natural_language/example_natural_language.py b/providers/tests/system/google/cloud/natural_language/example_natural_language.py similarity index 97% rename from tests/system/providers/google/cloud/natural_language/example_natural_language.py rename to providers/tests/system/google/cloud/natural_language/example_natural_language.py index e04fdf4fb601b..cc1aba0f165c7 100644 --- a/tests/system/providers/google/cloud/natural_language/example_natural_language.py +++ b/providers/tests/system/google/cloud/natural_language/example_natural_language.py @@ -119,13 +119,13 @@ analyze_sentiment >> analyze_sentiment_result analyze_classify_text >> analyze_classify_text_result - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataflow/__init__.py b/providers/tests/system/google/cloud/pubsub/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/dataflow/__init__.py rename to providers/tests/system/google/cloud/pubsub/__init__.py diff --git a/tests/system/providers/google/cloud/pubsub/example_pubsub.py b/providers/tests/system/google/cloud/pubsub/example_pubsub.py similarity index 97% rename from tests/system/providers/google/cloud/pubsub/example_pubsub.py rename to providers/tests/system/google/cloud/pubsub/example_pubsub.py index 4ff3091e5fa53..93ab1cde6e0a8 100644 --- a/tests/system/providers/google/cloud/pubsub/example_pubsub.py +++ b/providers/tests/system/google/cloud/pubsub/example_pubsub.py @@ -146,14 +146,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/pubsub/example_pubsub_deferrable.py b/providers/tests/system/google/cloud/pubsub/example_pubsub_deferrable.py similarity index 96% rename from tests/system/providers/google/cloud/pubsub/example_pubsub_deferrable.py rename to 
providers/tests/system/google/cloud/pubsub/example_pubsub_deferrable.py index 22c0d012aea9c..a902ea5617f6d 100644 --- a/tests/system/providers/google/cloud/pubsub/example_pubsub_deferrable.py +++ b/providers/tests/system/google/cloud/pubsub/example_pubsub_deferrable.py @@ -101,14 +101,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataflow/resources/__init__.py b/providers/tests/system/google/cloud/spanner/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/dataflow/resources/__init__.py rename to providers/tests/system/google/cloud/spanner/__init__.py diff --git a/tests/system/providers/google/cloud/spanner/example_spanner.py b/providers/tests/system/google/cloud/spanner/example_spanner.py similarity index 96% rename from tests/system/providers/google/cloud/spanner/example_spanner.py rename to providers/tests/system/google/cloud/spanner/example_spanner.py index a7bef8f6ecd0f..b11a5cd61f92c 100644 --- a/tests/system/providers/google/cloud/spanner/example_spanner.py +++ b/providers/tests/system/google/cloud/spanner/example_spanner.py @@ -34,7 +34,8 @@ SpannerUpdateDatabaseInstanceOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -160,14 +161,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataform/__init__.py b/providers/tests/system/google/cloud/speech_to_text/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/dataform/__init__.py rename to providers/tests/system/google/cloud/speech_to_text/__init__.py diff --git a/tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py b/providers/tests/system/google/cloud/speech_to_text/example_speech_to_text.py similarity index 94% rename from tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py rename to providers/tests/system/google/cloud/speech_to_text/example_speech_to_text.py index d551144a53416..f2382a6053aa4 100644 --- 
a/tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py +++ b/providers/tests/system/google/cloud/speech_to_text/example_speech_to_text.py @@ -27,7 +27,8 @@ from airflow.providers.google.cloud.operators.speech_to_text import CloudSpeechToTextRecognizeSpeechOperator from airflow.providers.google.cloud.operators.text_to_speech import CloudTextToSpeechSynthesizeOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -88,14 +89,14 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/datafusion/__init__.py b/providers/tests/system/google/cloud/sql_to_sheets/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/datafusion/__init__.py rename to providers/tests/system/google/cloud/sql_to_sheets/__init__.py diff --git a/tests/system/providers/google/cloud/sql_to_sheets/example_sql_to_sheets.py b/providers/tests/system/google/cloud/sql_to_sheets/example_sql_to_sheets.py similarity index 98% rename from tests/system/providers/google/cloud/sql_to_sheets/example_sql_to_sheets.py rename to providers/tests/system/google/cloud/sql_to_sheets/example_sql_to_sheets.py index 11231c0dfd40c..d34531ba52a0a 100644 --- a/tests/system/providers/google/cloud/sql_to_sheets/example_sql_to_sheets.py +++ b/providers/tests/system/google/cloud/sql_to_sheets/example_sql_to_sheets.py @@ -315,13 +315,13 @@ def delete_connection(connection_id: str) -> None: ] delete_gce_instance >> delete_persistent_disk - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/datapipelines/__init__.py b/providers/tests/system/google/cloud/stackdriver/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/datapipelines/__init__.py rename to providers/tests/system/google/cloud/stackdriver/__init__.py diff --git a/tests/system/providers/google/cloud/stackdriver/example_stackdriver.py b/providers/tests/system/google/cloud/stackdriver/example_stackdriver.py similarity index 97% rename from tests/system/providers/google/cloud/stackdriver/example_stackdriver.py rename to providers/tests/system/google/cloud/stackdriver/example_stackdriver.py index ea8690e70dbee..202c9bcfbdccf 100644 --- 
a/tests/system/providers/google/cloud/stackdriver/example_stackdriver.py +++ b/providers/tests/system/google/cloud/stackdriver/example_stackdriver.py @@ -40,7 +40,8 @@ StackdriverUpsertNotificationChannelOperator, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -227,13 +228,13 @@ delete_alert_policy_2, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataplex/__init__.py b/providers/tests/system/google/cloud/storage_transfer/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/dataplex/__init__.py rename to providers/tests/system/google/cloud/storage_transfer/__init__.py diff --git a/tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py b/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py similarity index 98% rename from tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py rename to providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py index e42be683ea52d..4d6a010954401 100644 --- a/tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +++ b/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py @@ -62,7 +62,8 @@ CloudDataTransferServiceJobStatusSensor, ) from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") GCP_PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -253,14 +254,14 @@ ] ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py b/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py similarity index 98% rename from tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py rename to 
providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py index 2920409ce7547..b2cef2f831b5f 100644 --- a/tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py +++ b/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py @@ -195,13 +195,13 @@ >> [delete_transfer, delete_bucket_src, delete_bucket_dst] ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py b/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py similarity index 96% rename from tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py rename to providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py index 53752d2079133..e670eba755428 100644 --- a/tests/system/providers/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py +++ b/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py @@ -106,13 +106,13 @@ >> [delete_bucket_src, delete_bucket_dst] ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataprep/__init__.py b/providers/tests/system/google/cloud/storage_transfer/resources/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/dataprep/__init__.py rename to providers/tests/system/google/cloud/storage_transfer/resources/__init__.py diff --git a/tests/system/providers/google/cloud/storage_transfer/resources/transfer_service_gcp_file b/providers/tests/system/google/cloud/storage_transfer/resources/transfer_service_gcp_file similarity index 100% rename from tests/system/providers/google/cloud/storage_transfer/resources/transfer_service_gcp_file rename to providers/tests/system/google/cloud/storage_transfer/resources/transfer_service_gcp_file diff --git a/tests/system/providers/google/cloud/storage_transfer/resources/transfer_service_gcs_to_gcs_file b/providers/tests/system/google/cloud/storage_transfer/resources/transfer_service_gcs_to_gcs_file similarity index 100% rename from tests/system/providers/google/cloud/storage_transfer/resources/transfer_service_gcs_to_gcs_file rename to providers/tests/system/google/cloud/storage_transfer/resources/transfer_service_gcs_to_gcs_file diff --git a/tests/system/providers/google/cloud/dataproc/__init__.py 
b/providers/tests/system/google/cloud/tasks/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/dataproc/__init__.py rename to providers/tests/system/google/cloud/tasks/__init__.py diff --git a/tests/system/providers/google/cloud/tasks/example_queue.py b/providers/tests/system/google/cloud/tasks/example_queue.py similarity index 97% rename from tests/system/providers/google/cloud/tasks/example_queue.py rename to providers/tests/system/google/cloud/tasks/example_queue.py index 4c29b584f5bfc..9797a29d04e22 100644 --- a/tests/system/providers/google/cloud/tasks/example_queue.py +++ b/providers/tests/system/google/cloud/tasks/example_queue.py @@ -161,14 +161,14 @@ def generate_random_string(): delete_queue, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/tasks/example_tasks.py b/providers/tests/system/google/cloud/tasks/example_tasks.py similarity index 97% rename from tests/system/providers/google/cloud/tasks/example_tasks.py rename to providers/tests/system/google/cloud/tasks/example_tasks.py index 1a85ac53187b3..0eae95fd1075d 100644 --- a/tests/system/providers/google/cloud/tasks/example_tasks.py +++ b/providers/tests/system/google/cloud/tasks/example_tasks.py @@ -152,14 +152,14 @@ def generate_random_string(): random_string, create_queue, create_task, tasks_get, list_tasks, run_task, delete_task, delete_queue ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/dataproc_metastore/__init__.py b/providers/tests/system/google/cloud/text_to_speech/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/dataproc_metastore/__init__.py rename to providers/tests/system/google/cloud/text_to_speech/__init__.py diff --git a/tests/system/providers/google/cloud/text_to_speech/example_text_to_speech.py b/providers/tests/system/google/cloud/text_to_speech/example_text_to_speech.py similarity index 93% rename from tests/system/providers/google/cloud/text_to_speech/example_text_to_speech.py rename to providers/tests/system/google/cloud/text_to_speech/example_text_to_speech.py index f5c12a44a4abf..0227049508a74 100644 --- a/tests/system/providers/google/cloud/text_to_speech/example_text_to_speech.py +++ b/providers/tests/system/google/cloud/text_to_speech/example_text_to_speech.py @@ -24,7 +24,8 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.google.cloud.operators.text_to_speech import CloudTextToSpeechSynthesizeOperator from 
airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -77,14 +78,14 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/datastore/__init__.py b/providers/tests/system/google/cloud/transfers/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/datastore/__init__.py rename to providers/tests/system/google/cloud/transfers/__init__.py diff --git a/tests/system/providers/google/cloud/transfers/example_gcs_to_sftp.py b/providers/tests/system/google/cloud/transfers/example_gcs_to_sftp.py similarity index 96% rename from tests/system/providers/google/cloud/transfers/example_gcs_to_sftp.py rename to providers/tests/system/google/cloud/transfers/example_gcs_to_sftp.py index 0caed0affa19a..8d3f19416313a 100644 --- a/tests/system/providers/google/cloud/transfers/example_gcs_to_sftp.py +++ b/providers/tests/system/google/cloud/transfers/example_gcs_to_sftp.py @@ -31,7 +31,8 @@ from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator from airflow.providers.sftp.sensors.sftp import SFTPSensor from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -170,14 +171,14 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/transfers/example_gdrive_to_local.py b/providers/tests/system/google/cloud/transfers/example_gdrive_to_local.py similarity index 96% rename from tests/system/providers/google/cloud/transfers/example_gdrive_to_local.py rename to providers/tests/system/google/cloud/transfers/example_gdrive_to_local.py index d9ac60301b77b..abfcbabce0852 100644 --- a/tests/system/providers/google/cloud/transfers/example_gdrive_to_local.py +++ b/providers/tests/system/google/cloud/transfers/example_gdrive_to_local.py @@ -40,7 +40,8 @@ from airflow.providers.google.suite.transfers.gcs_to_gdrive import GCSToGoogleDriveOperator from airflow.settings import Session, json from 
airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -166,13 +167,13 @@ def delete_connection(connection_id: str) -> None: >> [delete_bucket, delete_connection_task] ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/transfers/example_postgres_to_gcs.py b/providers/tests/system/google/cloud/transfers/example_postgres_to_gcs.py similarity index 98% rename from tests/system/providers/google/cloud/transfers/example_postgres_to_gcs.py rename to providers/tests/system/google/cloud/transfers/example_postgres_to_gcs.py index 33a289c1ffa1c..91bde027982ec 100644 --- a/tests/system/providers/google/cloud/transfers/example_postgres_to_gcs.py +++ b/providers/tests/system/google/cloud/transfers/example_postgres_to_gcs.py @@ -290,13 +290,13 @@ def delete_connection(connection_id: str) -> None: postgres_to_gcs >> [delete_gcs_bucket, delete_firewall_rule, delete_gce_instance, delete_connection_task] delete_gce_instance >> delete_persistent_disk - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/gcs/__init__.py b/providers/tests/system/google/cloud/transfers/resources/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/gcs/__init__.py rename to providers/tests/system/google/cloud/transfers/resources/__init__.py diff --git a/tests/system/providers/google/cloud/transfers/resources/empty.txt b/providers/tests/system/google/cloud/transfers/resources/empty.txt similarity index 100% rename from tests/system/providers/google/cloud/transfers/resources/empty.txt rename to providers/tests/system/google/cloud/transfers/resources/empty.txt diff --git a/tests/system/providers/google/cloud/gcs/resources/__init__.py b/providers/tests/system/google/cloud/translate/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/gcs/resources/__init__.py rename to providers/tests/system/google/cloud/translate/__init__.py diff --git a/tests/system/providers/google/cloud/translate/example_translate.py b/providers/tests/system/google/cloud/translate/example_translate.py similarity index 94% rename from tests/system/providers/google/cloud/translate/example_translate.py rename to providers/tests/system/google/cloud/translate/example_translate.py index 
b593060f6e5bb..13a3ee061e308 100644 --- a/tests/system/providers/google/cloud/translate/example_translate.py +++ b/providers/tests/system/google/cloud/translate/example_translate.py @@ -56,14 +56,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/kubernetes_engine/__init__.py b/providers/tests/system/google/cloud/translate_speech/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/kubernetes_engine/__init__.py rename to providers/tests/system/google/cloud/translate_speech/__init__.py diff --git a/tests/system/providers/google/cloud/translate_speech/example_translate_speech.py b/providers/tests/system/google/cloud/translate_speech/example_translate_speech.py similarity index 96% rename from tests/system/providers/google/cloud/translate_speech/example_translate_speech.py rename to providers/tests/system/google/cloud/translate_speech/example_translate_speech.py index bb50adb4b62c5..4fe34d0b86f57 100644 --- a/tests/system/providers/google/cloud/translate_speech/example_translate_speech.py +++ b/providers/tests/system/google/cloud/translate_speech/example_translate_speech.py @@ -110,14 +110,14 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/life_sciences/__init__.py b/providers/tests/system/google/cloud/vertex_ai/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/life_sciences/__init__.py rename to providers/tests/system/google/cloud/vertex_ai/__init__.py diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py similarity index 97% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py index 03634b58f645b..8dd0ecc9c9550 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py @@ -150,13 +150,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order 
to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py similarity index 97% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py index c26ea94325e76..bf96da992aca1 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py @@ -135,13 +135,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py similarity index 93% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py index 3303f219d4e1c..ca6c65e1a6ddd 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py @@ -52,13 +52,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py similarity index 97% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py index 91260eccdea57..fb32c7475c7f1 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py +++ 
b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py @@ -140,13 +140,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py similarity index 97% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py index cde6bb183e777..6c3db89382f24 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py @@ -145,13 +145,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py similarity index 98% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py index 38198b5526874..78bb9ffa6bad4 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py @@ -231,13 +231,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_container.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_container.py similarity index 98% rename from 
tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_container.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_container.py index dc09a8be90ed7..1295d20983eeb 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_container.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_container.py @@ -208,13 +208,13 @@ def TABULAR_DATASET(bucket_name): # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py similarity index 98% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py index 8762feb85ba39..3fa6169cb2759 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py @@ -250,13 +250,13 @@ def TABULAR_DATASET(bucket_name): # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py similarity index 98% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py index 49a8d870bc394..fbc40b888216b 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py @@ -211,13 +211,13 @@ def TABULAR_DATASET(bucket_name): # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: 
E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py similarity index 98% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py index 77b69081a4734..3e1b98a2232d1 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py @@ -269,13 +269,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py similarity index 98% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py index 8fa802b51744d..12b1181a79280 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py @@ -206,13 +206,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model.py similarity index 98% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model.py index f9fe332b0af7d..7e696184eef41 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model.py @@ -155,14 +155,14 @@ ) # [END how_to_cloud_vertex_ai_run_evaluation_operator] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 
+from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py similarity index 94% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py index 18958cb409e65..affba0b6aede8 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py @@ -56,13 +56,13 @@ ) # [END how_to_cloud_vertex_ai_supervised_fine_tuning_train_operator] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py similarity index 98% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py index 913fff2b4e09b..55254741d9cc2 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py @@ -181,13 +181,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py similarity index 93% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py index 0a1a8d3fb247b..75de719b6aa9e 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py @@ -51,13 +51,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher 
import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py similarity index 98% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py index b06f8287798df..3ad5537a10c29 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py @@ -345,13 +345,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py similarity index 97% rename from tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py rename to providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py index 7dd4aa84fe41e..5ab29ee999f16 100644 --- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py @@ -177,13 +177,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/life_sciences/resources/__init__.py b/providers/tests/system/google/cloud/video_intelligence/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/life_sciences/resources/__init__.py rename to providers/tests/system/google/cloud/video_intelligence/__init__.py diff --git a/tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py b/providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py similarity index 97% rename from 
tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py rename to providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py index 499db2d6427ba..e630fd1471a8a 100644 --- a/tests/system/providers/google/cloud/video_intelligence/example_video_intelligence.py +++ b/providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py @@ -154,14 +154,14 @@ delete_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/ml_engine/__init__.py b/providers/tests/system/google/cloud/vision/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/ml_engine/__init__.py rename to providers/tests/system/google/cloud/vision/__init__.py diff --git a/tests/system/providers/google/cloud/vision/example_vision_annotate_image.py b/providers/tests/system/google/cloud/vision/example_vision_annotate_image.py similarity index 97% rename from tests/system/providers/google/cloud/vision/example_vision_annotate_image.py rename to providers/tests/system/google/cloud/vision/example_vision_annotate_image.py index 2a4d7b75f1332..1e09fb7fe4f9d 100644 --- a/tests/system/providers/google/cloud/vision/example_vision_annotate_image.py +++ b/providers/tests/system/google/cloud/vision/example_vision_annotate_image.py @@ -41,7 +41,7 @@ # [START howto_operator_vision_enums_import] from google.cloud.vision_v1 import Feature # isort:skip -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [END howto_operator_vision_enums_import] @@ -191,14 +191,14 @@ delete_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vision/example_vision_autogenerated.py b/providers/tests/system/google/cloud/vision/example_vision_autogenerated.py similarity index 98% rename from tests/system/providers/google/cloud/vision/example_vision_autogenerated.py rename to providers/tests/system/google/cloud/vision/example_vision_autogenerated.py index 907386ceb295c..11fd3cdd54029 100644 --- a/tests/system/providers/google/cloud/vision/example_vision_autogenerated.py +++ b/providers/tests/system/google/cloud/vision/example_vision_autogenerated.py @@ -59,7 +59,7 @@ # [END howto_operator_vision_reference_image_import] # [START howto_operator_vision_enums_import] from google.cloud.vision_v1 import Feature # isort:skip -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from providers.tests.system.google import 
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [END howto_operator_vision_enums_import] @@ -268,14 +268,14 @@ delete_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vision/example_vision_explicit.py b/providers/tests/system/google/cloud/vision/example_vision_explicit.py similarity index 98% rename from tests/system/providers/google/cloud/vision/example_vision_explicit.py rename to providers/tests/system/google/cloud/vision/example_vision_explicit.py index 663be91259683..0c71be95bda7a 100644 --- a/tests/system/providers/google/cloud/vision/example_vision_explicit.py +++ b/providers/tests/system/google/cloud/vision/example_vision_explicit.py @@ -54,7 +54,7 @@ # [END howto_operator_vision_product_import_2] # [START howto_operator_vision_reference_image_import_2] from google.cloud.vision_v1.types import ReferenceImage # isort:skip -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID # [END howto_operator_vision_reference_image_import_2] @@ -279,14 +279,14 @@ delete_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/natural_language/__init__.py b/providers/tests/system/google/cloud/workflows/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/natural_language/__init__.py rename to providers/tests/system/google/cloud/workflows/__init__.py diff --git a/tests/system/providers/google/cloud/workflows/example_workflows.py b/providers/tests/system/google/cloud/workflows/example_workflows.py similarity index 98% rename from tests/system/providers/google/cloud/workflows/example_workflows.py rename to providers/tests/system/google/cloud/workflows/example_workflows.py index b010a146b9bd1..6d13484acbb42 100644 --- a/tests/system/providers/google/cloud/workflows/example_workflows.py +++ b/providers/tests/system/google/cloud/workflows/example_workflows.py @@ -227,14 +227,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: 
tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/conftest.py b/providers/tests/system/google/conftest.py similarity index 100% rename from tests/system/providers/google/conftest.py rename to providers/tests/system/google/conftest.py diff --git a/tests/system/providers/google/cloud/pubsub/__init__.py b/providers/tests/system/google/datacatalog/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/pubsub/__init__.py rename to providers/tests/system/google/datacatalog/__init__.py diff --git a/tests/system/providers/google/datacatalog/example_datacatalog_entries.py b/providers/tests/system/google/datacatalog/example_datacatalog_entries.py similarity index 97% rename from tests/system/providers/google/datacatalog/example_datacatalog_entries.py rename to providers/tests/system/google/datacatalog/example_datacatalog_entries.py index 47edfb96368f1..0f84361d4fcc8 100644 --- a/tests/system/providers/google/datacatalog/example_datacatalog_entries.py +++ b/providers/tests/system/google/datacatalog/example_datacatalog_entries.py @@ -37,7 +37,8 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -199,14 +200,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/datacatalog/example_datacatalog_search_catalog.py b/providers/tests/system/google/datacatalog/example_datacatalog_search_catalog.py similarity index 97% rename from tests/system/providers/google/datacatalog/example_datacatalog_search_catalog.py rename to providers/tests/system/google/datacatalog/example_datacatalog_search_catalog.py index 781d047c53469..a77a023f32338 100644 --- a/tests/system/providers/google/datacatalog/example_datacatalog_search_catalog.py +++ b/providers/tests/system/google/datacatalog/example_datacatalog_search_catalog.py @@ -39,7 +39,8 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -222,14 +223,14 @@ # ### Everything below this line is not part of example ### # ### Just 
for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py b/providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py similarity index 97% rename from tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py rename to providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py index b8dd9170c3c02..af49d3de08fa7 100644 --- a/tests/system/providers/google/datacatalog/example_datacatalog_tag_templates.py +++ b/providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py @@ -36,7 +36,8 @@ ) from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -182,14 +183,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/datacatalog/example_datacatalog_tags.py b/providers/tests/system/google/datacatalog/example_datacatalog_tags.py similarity index 97% rename from tests/system/providers/google/datacatalog/example_datacatalog_tags.py rename to providers/tests/system/google/datacatalog/example_datacatalog_tags.py index 17397fcea2806..fdbdaa451056a 100644 --- a/tests/system/providers/google/datacatalog/example_datacatalog_tags.py +++ b/providers/tests/system/google/datacatalog/example_datacatalog_tags.py @@ -40,7 +40,8 @@ from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -232,14 +233,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test 
needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/spanner/__init__.py b/providers/tests/system/google/firebase/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/spanner/__init__.py rename to providers/tests/system/google/firebase/__init__.py diff --git a/tests/providers/telegram/operators/__init__.py b/providers/tests/system/google/leveldb/__init__.py similarity index 100% rename from tests/providers/telegram/operators/__init__.py rename to providers/tests/system/google/leveldb/__init__.py diff --git a/tests/system/providers/google/leveldb/example_leveldb.py b/providers/tests/system/google/leveldb/example_leveldb.py similarity index 94% rename from tests/system/providers/google/leveldb/example_leveldb.py rename to providers/tests/system/google/leveldb/example_leveldb.py index 8474de830275c..8b11d9e87bebc 100644 --- a/tests/system/providers/google/leveldb/example_leveldb.py +++ b/providers/tests/system/google/leveldb/example_leveldb.py @@ -60,14 +60,14 @@ # [END howto_operator_leveldb_put_key] get_key_leveldb_task >> put_key_leveldb_task - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/teradata/hooks/__init__.py b/providers/tests/system/google/marketing_platform/__init__.py similarity index 100% rename from tests/providers/teradata/hooks/__init__.py rename to providers/tests/system/google/marketing_platform/__init__.py diff --git a/tests/system/providers/google/marketing_platform/example_analytics_admin.py b/providers/tests/system/google/marketing_platform/example_analytics_admin.py similarity index 98% rename from tests/system/providers/google/marketing_platform/example_analytics_admin.py rename to providers/tests/system/google/marketing_platform/example_analytics_admin.py index be1ad9f257f99..16ce6f8190bdc 100644 --- a/tests/system/providers/google/marketing_platform/example_analytics_admin.py +++ b/providers/tests/system/google/marketing_platform/example_analytics_admin.py @@ -219,13 +219,13 @@ def delete_connection(connection_id: str) -> None: # TEST TEARDOWN >> delete_connection_task ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git 
a/tests/system/providers/google/marketing_platform/example_campaign_manager.py b/providers/tests/system/google/marketing_platform/example_campaign_manager.py similarity index 98% rename from tests/system/providers/google/marketing_platform/example_campaign_manager.py rename to providers/tests/system/google/marketing_platform/example_campaign_manager.py index 932769eb52adc..0c3b26e5e98b5 100644 --- a/tests/system/providers/google/marketing_platform/example_campaign_manager.py +++ b/providers/tests/system/google/marketing_platform/example_campaign_manager.py @@ -55,7 +55,8 @@ ) from airflow.settings import Session from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -321,13 +322,13 @@ def delete_connection(connection_id: str) -> None: >> delete_connection(connection_id=CONNECTION_ID) ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/marketing_platform/example_search_ads.py b/providers/tests/system/google/marketing_platform/example_search_ads.py similarity index 97% rename from tests/system/providers/google/marketing_platform/example_search_ads.py rename to providers/tests/system/google/marketing_platform/example_search_ads.py index b8e71de90adc2..5d86a3ad1684d 100644 --- a/tests/system/providers/google/marketing_platform/example_search_ads.py +++ b/providers/tests/system/google/marketing_platform/example_search_ads.py @@ -102,7 +102,7 @@ (query_report >> get_field >> search_fields >> get_custom_column >> list_custom_columns) -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/speech_to_text/__init__.py b/providers/tests/system/google/suite/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/speech_to_text/__init__.py rename to providers/tests/system/google/suite/__init__.py diff --git a/tests/system/providers/google/suite/example_local_to_drive.py b/providers/tests/system/google/suite/example_local_to_drive.py similarity index 97% rename from tests/system/providers/google/suite/example_local_to_drive.py rename to providers/tests/system/google/suite/example_local_to_drive.py index 2e79a932d4fb9..ac8ebfc5aabbc 100644 --- a/tests/system/providers/google/suite/example_local_to_drive.py +++ b/providers/tests/system/google/suite/example_local_to_drive.py @@ -141,13 +141,13 @@ def delete_connection(connection_id: str) -> None: >> delete_connection_task ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark 
success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/sql_to_sheets/__init__.py b/providers/tests/system/google/suite/resources/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/sql_to_sheets/__init__.py rename to providers/tests/system/google/suite/resources/__init__.py diff --git a/tests/system/providers/google/suite/resources/test1 b/providers/tests/system/google/suite/resources/test1 similarity index 100% rename from tests/system/providers/google/suite/resources/test1 rename to providers/tests/system/google/suite/resources/test1 diff --git a/tests/system/providers/google/suite/resources/test2 b/providers/tests/system/google/suite/resources/test2 similarity index 100% rename from tests/system/providers/google/suite/resources/test2 rename to providers/tests/system/google/suite/resources/test2 diff --git a/tests/system/providers/google/cloud/stackdriver/__init__.py b/providers/tests/system/google/workplace/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/stackdriver/__init__.py rename to providers/tests/system/google/workplace/__init__.py diff --git a/tests/system/providers/google/cloud/storage_transfer/__init__.py b/providers/tests/system/http/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/storage_transfer/__init__.py rename to providers/tests/system/http/__init__.py diff --git a/tests/system/providers/http/example_http.py b/providers/tests/system/http/example_http.py similarity index 98% rename from tests/system/providers/http/example_http.py rename to providers/tests/system/http/example_http.py index bf5d08f086c18..98423943607cd 100644 --- a/tests/system/providers/http/example_http.py +++ b/providers/tests/system/http/example_http.py @@ -157,7 +157,7 @@ def get_next_page_cursor(response) -> dict | None: task_get_op_response_filter >> task_put_op >> task_del_op >> task_post_op_formenc task_post_op_formenc >> task_get_paginated -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/storage_transfer/resources/__init__.py b/providers/tests/system/influxdb/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/storage_transfer/resources/__init__.py rename to providers/tests/system/influxdb/__init__.py diff --git a/tests/system/providers/influxdb/example_influxdb.py b/providers/tests/system/influxdb/example_influxdb.py similarity index 94% rename from tests/system/providers/influxdb/example_influxdb.py rename to providers/tests/system/influxdb/example_influxdb.py index 9a71271c02d9c..8e4d486742d5f 100644 --- a/tests/system/providers/influxdb/example_influxdb.py +++ b/providers/tests/system/influxdb/example_influxdb.py @@ -61,13 +61,13 @@ def test_influxdb_hook(): ) as dag: test_influxdb_hook() - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when 
"tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/influxdb/example_influxdb_query.py b/providers/tests/system/influxdb/example_influxdb_query.py similarity index 95% rename from tests/system/providers/influxdb/example_influxdb_query.py rename to providers/tests/system/influxdb/example_influxdb_query.py index 6a0c14781aaba..95940e8c8f366 100644 --- a/tests/system/providers/influxdb/example_influxdb_query.py +++ b/providers/tests/system/influxdb/example_influxdb_query.py @@ -43,7 +43,7 @@ # [END howto_operator_influxdb] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/tasks/__init__.py b/providers/tests/system/jdbc/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/tasks/__init__.py rename to providers/tests/system/jdbc/__init__.py diff --git a/tests/system/providers/jdbc/example_jdbc_queries.py b/providers/tests/system/jdbc/example_jdbc_queries.py similarity index 94% rename from tests/system/providers/jdbc/example_jdbc_queries.py rename to providers/tests/system/jdbc/example_jdbc_queries.py index 0da4b8d4479aa..ce9234a6fb0b7 100644 --- a/tests/system/providers/jdbc/example_jdbc_queries.py +++ b/providers/tests/system/jdbc/example_jdbc_queries.py @@ -59,13 +59,13 @@ delete_data >> insert_data >> run_this_last - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/text_to_speech/__init__.py b/providers/tests/system/jenkins/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/text_to_speech/__init__.py rename to providers/tests/system/jenkins/__init__.py diff --git a/tests/system/providers/jenkins/example_jenkins_job_trigger.py b/providers/tests/system/jenkins/example_jenkins_job_trigger.py similarity index 97% rename from tests/system/providers/jenkins/example_jenkins_job_trigger.py rename to providers/tests/system/jenkins/example_jenkins_job_trigger.py index e7fa2e073be97..6b7fab62a0d36 100644 --- a/tests/system/providers/jenkins/example_jenkins_job_trigger.py +++ b/providers/tests/system/jenkins/example_jenkins_job_trigger.py @@ -72,7 +72,7 @@ def grab_artifact_from_jenkins(url): # job_trigger >> grab_artifact_from_jenkins() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/transfers/__init__.py 
b/providers/tests/system/microsoft/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/transfers/__init__.py rename to providers/tests/system/microsoft/__init__.py diff --git a/tests/providers/teradata/operators/__init__.py b/providers/tests/system/microsoft/azure/__init__.py similarity index 100% rename from tests/providers/teradata/operators/__init__.py rename to providers/tests/system/microsoft/azure/__init__.py diff --git a/tests/system/providers/microsoft/azure/example_adf_run_pipeline.py b/providers/tests/system/microsoft/azure/example_adf_run_pipeline.py similarity index 96% rename from tests/system/providers/microsoft/azure/example_adf_run_pipeline.py rename to providers/tests/system/microsoft/azure/example_adf_run_pipeline.py index d883f5128737f..a6eae4d71486d 100644 --- a/tests/system/providers/microsoft/azure/example_adf_run_pipeline.py +++ b/providers/tests/system/microsoft/azure/example_adf_run_pipeline.py @@ -108,13 +108,13 @@ # Task dependency created via `XComArgs`: # run_pipeline2 >> pipeline_run_sensor - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/example_adls_create.py b/providers/tests/system/microsoft/azure/example_adls_create.py similarity index 93% rename from tests/system/providers/microsoft/azure/example_adls_create.py rename to providers/tests/system/microsoft/azure/example_adls_create.py index 726e9eba76ae8..3f76525b6b87e 100644 --- a/tests/system/providers/microsoft/azure/example_adls_create.py +++ b/providers/tests/system/microsoft/azure/example_adls_create.py @@ -46,13 +46,13 @@ upload_data >> delete_file - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/example_adls_delete.py b/providers/tests/system/microsoft/azure/example_adls_delete.py similarity index 93% rename from tests/system/providers/microsoft/azure/example_adls_delete.py rename to providers/tests/system/microsoft/azure/example_adls_delete.py index 2b1977e938dc2..34abc6a9b2dae 100644 --- a/tests/system/providers/microsoft/azure/example_adls_delete.py +++ b/providers/tests/system/microsoft/azure/example_adls_delete.py @@ -46,13 +46,13 @@ upload_file >> remove_file - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import 
get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/example_adls_list.py b/providers/tests/system/microsoft/azure/example_adls_list.py similarity index 92% rename from tests/system/providers/microsoft/azure/example_adls_list.py rename to providers/tests/system/microsoft/azure/example_adls_list.py index a6bd2d7bd6652..594b161aaa9d6 100644 --- a/tests/system/providers/microsoft/azure/example_adls_list.py +++ b/providers/tests/system/microsoft/azure/example_adls_list.py @@ -42,13 +42,13 @@ ) # [END howto_operator_adls_list] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/example_azure_batch_operator.py b/providers/tests/system/microsoft/azure/example_azure_batch_operator.py similarity index 96% rename from tests/system/providers/microsoft/azure/example_azure_batch_operator.py rename to providers/tests/system/microsoft/azure/example_azure_batch_operator.py index 85977f2a0a6e4..944c737f96a0d 100644 --- a/tests/system/providers/microsoft/azure/example_azure_batch_operator.py +++ b/providers/tests/system/microsoft/azure/example_azure_batch_operator.py @@ -57,7 +57,7 @@ ) # [END howto_azure_batch_operator] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/example_azure_container_instances.py b/providers/tests/system/microsoft/azure/example_azure_container_instances.py similarity index 97% rename from tests/system/providers/microsoft/azure/example_azure_container_instances.py rename to providers/tests/system/microsoft/azure/example_azure_container_instances.py index 0db79a71837b1..ee9b025193e36 100644 --- a/tests/system/providers/microsoft/azure/example_azure_container_instances.py +++ b/providers/tests/system/microsoft/azure/example_azure_container_instances.py @@ -90,7 +90,7 @@ cpu=1.0, task_id="start_container_with_azure_container_volume", ) -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/example_azure_cosmosdb.py b/providers/tests/system/microsoft/azure/example_azure_cosmosdb.py similarity index 94% rename from tests/system/providers/microsoft/azure/example_azure_cosmosdb.py rename to providers/tests/system/microsoft/azure/example_azure_cosmosdb.py index d48d636f28dbf..1d4bb60b05163 100644 --- a/tests/system/providers/microsoft/azure/example_azure_cosmosdb.py +++ b/providers/tests/system/microsoft/azure/example_azure_cosmosdb.py @@ -64,13 +64,13 @@ t1 >> t2 - from tests.system.utils.watcher import watcher + from 
dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/example_azure_service_bus.py b/providers/tests/system/microsoft/azure/example_azure_service_bus.py similarity index 98% rename from tests/system/providers/microsoft/azure/example_azure_service_bus.py rename to providers/tests/system/microsoft/azure/example_azure_service_bus.py index e0b8558ffec10..bfa77db40dba0 100644 --- a/tests/system/providers/microsoft/azure/example_azure_service_bus.py +++ b/providers/tests/system/microsoft/azure/example_azure_service_bus.py @@ -172,13 +172,13 @@ delete_service_bus_queue, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/example_azure_synapse.py b/providers/tests/system/microsoft/azure/example_azure_synapse.py similarity index 97% rename from tests/system/providers/microsoft/azure/example_azure_synapse.py rename to providers/tests/system/microsoft/azure/example_azure_synapse.py index 7465b71677deb..c7f7800e57834 100644 --- a/tests/system/providers/microsoft/azure/example_azure_synapse.py +++ b/providers/tests/system/microsoft/azure/example_azure_synapse.py @@ -69,7 +69,7 @@ ) # [END howto_operator_azure_synapse] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/example_fileshare.py b/providers/tests/system/microsoft/azure/example_fileshare.py similarity index 93% rename from tests/system/providers/microsoft/azure/example_fileshare.py rename to providers/tests/system/microsoft/azure/example_fileshare.py index f306a0188efa9..bfa9819dbef68 100644 --- a/tests/system/providers/microsoft/azure/example_fileshare.py +++ b/providers/tests/system/microsoft/azure/example_fileshare.py @@ -55,13 +55,13 @@ def delete_fileshare(): ) as dag: create_fileshare() >> delete_fileshare() - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/example_local_to_adls.py 
b/providers/tests/system/microsoft/azure/example_local_to_adls.py similarity index 93% rename from tests/system/providers/microsoft/azure/example_local_to_adls.py rename to providers/tests/system/microsoft/azure/example_local_to_adls.py index f5a75e7ce414b..d540aaf34d993 100644 --- a/tests/system/providers/microsoft/azure/example_local_to_adls.py +++ b/providers/tests/system/microsoft/azure/example_local_to_adls.py @@ -47,13 +47,13 @@ upload_file >> delete_file - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/example_local_to_wasb.py b/providers/tests/system/microsoft/azure/example_local_to_wasb.py similarity index 94% rename from tests/system/providers/microsoft/azure/example_local_to_wasb.py rename to providers/tests/system/microsoft/azure/example_local_to_wasb.py index b03c11e6b3673..7e368ca839928 100644 --- a/tests/system/providers/microsoft/azure/example_local_to_wasb.py +++ b/providers/tests/system/microsoft/azure/example_local_to_wasb.py @@ -49,13 +49,13 @@ upload >> delete - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/example_msfabric.py b/providers/tests/system/microsoft/azure/example_msfabric.py similarity index 94% rename from tests/system/providers/microsoft/azure/example_msfabric.py rename to providers/tests/system/microsoft/azure/example_msfabric.py index 5f8b0657c4019..b1113025286d4 100644 --- a/tests/system/providers/microsoft/azure/example_msfabric.py +++ b/providers/tests/system/microsoft/azure/example_msfabric.py @@ -51,13 +51,13 @@ ) # [END howto_operator_ms_fabric_create_item_schedule] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/example_msgraph.py b/providers/tests/system/microsoft/azure/example_msgraph.py similarity index 93% rename from tests/system/providers/microsoft/azure/example_msgraph.py rename to providers/tests/system/microsoft/azure/example_msgraph.py index 5ff7ba6f88835..33ee00468523b 100644 --- a/tests/system/providers/microsoft/azure/example_msgraph.py +++ 
b/providers/tests/system/microsoft/azure/example_msgraph.py @@ -49,13 +49,13 @@ site_task >> site_pages_task - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/example_powerbi.py b/providers/tests/system/microsoft/azure/example_powerbi.py similarity index 96% rename from tests/system/providers/microsoft/azure/example_powerbi.py rename to providers/tests/system/microsoft/azure/example_powerbi.py index 0a1bfde54a7a9..b5a982a1159a5 100644 --- a/tests/system/providers/microsoft/azure/example_powerbi.py +++ b/providers/tests/system/microsoft/azure/example_powerbi.py @@ -97,13 +97,13 @@ workspaces_task >> workspaces_info_task >> check_workspace_status_task refresh_dataset_task >> refresh_dataset_history_task - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/example_powerbi_dataset_refresh.py b/providers/tests/system/microsoft/azure/example_powerbi_dataset_refresh.py similarity index 95% rename from tests/system/providers/microsoft/azure/example_powerbi_dataset_refresh.py rename to providers/tests/system/microsoft/azure/example_powerbi_dataset_refresh.py index 52f1f001e9988..c02cec3e57f5e 100644 --- a/tests/system/providers/microsoft/azure/example_powerbi_dataset_refresh.py +++ b/providers/tests/system/microsoft/azure/example_powerbi_dataset_refresh.py @@ -76,13 +76,13 @@ def create_connection(conn_id_name: str): refresh_powerbi_dataset, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/example_s3_to_wasb.py b/providers/tests/system/microsoft/azure/example_s3_to_wasb.py similarity index 94% rename from tests/system/providers/microsoft/azure/example_s3_to_wasb.py rename to providers/tests/system/microsoft/azure/example_s3_to_wasb.py index 48fd428b9cde4..88ae64324d17e 100644 --- a/tests/system/providers/microsoft/azure/example_s3_to_wasb.py +++ b/providers/tests/system/microsoft/azure/example_s3_to_wasb.py @@ -29,7 +29,8 @@ ) from airflow.providers.microsoft.azure.transfers.s3_to_wasb import S3ToAzureBlobStorageOperator from airflow.utils.trigger_rule import 
TriggerRule -from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder + +from providers.tests.system.amazon.aws.utils import SystemTestContextBuilder sys_test_context_task = SystemTestContextBuilder().build() @@ -102,13 +103,13 @@ remove_s3_bucket, ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure when "tearDown" task with trigger # rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/example_sftp_to_wasb.py b/providers/tests/system/microsoft/azure/example_sftp_to_wasb.py similarity index 95% rename from tests/system/providers/microsoft/azure/example_sftp_to_wasb.py rename to providers/tests/system/microsoft/azure/example_sftp_to_wasb.py index d80539cb4a238..42e5968304f08 100644 --- a/tests/system/providers/microsoft/azure/example_sftp_to_wasb.py +++ b/providers/tests/system/microsoft/azure/example_sftp_to_wasb.py @@ -75,13 +75,13 @@ def delete_sftp_file(): transfer_files_to_sftp_step >> transfer_files_to_azure >> delete_blob_file_step >> delete_sftp_file() - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/example_synapse_run_pipeline.py b/providers/tests/system/microsoft/azure/example_synapse_run_pipeline.py similarity index 93% rename from tests/system/providers/microsoft/azure/example_synapse_run_pipeline.py rename to providers/tests/system/microsoft/azure/example_synapse_run_pipeline.py index 6b69bd8972b4f..de87d6b4c2483 100644 --- a/tests/system/providers/microsoft/azure/example_synapse_run_pipeline.py +++ b/providers/tests/system/microsoft/azure/example_synapse_run_pipeline.py @@ -43,13 +43,13 @@ # [END howto_operator_azure_synapse_run_pipeline] begin >> run_pipeline1 - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/example_wasb_sensors.py b/providers/tests/system/microsoft/azure/example_wasb_sensors.py similarity index 96% rename from tests/system/providers/microsoft/azure/example_wasb_sensors.py rename to providers/tests/system/microsoft/azure/example_wasb_sensors.py index 806a863cbfbc3..56c1dce34d688 100644 --- a/tests/system/providers/microsoft/azure/example_wasb_sensors.py +++ 
b/providers/tests/system/microsoft/azure/example_wasb_sensors.py @@ -62,7 +62,7 @@ # [END wasb_prefix_sensor] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/transfers/resources/__init__.py b/providers/tests/system/microsoft/mssql/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/transfers/resources/__init__.py rename to providers/tests/system/microsoft/mssql/__init__.py diff --git a/tests/system/providers/microsoft/mssql/create_table.sql b/providers/tests/system/microsoft/mssql/create_table.sql similarity index 100% rename from tests/system/providers/microsoft/mssql/create_table.sql rename to providers/tests/system/microsoft/mssql/create_table.sql diff --git a/tests/system/providers/microsoft/mssql/example_mssql.py b/providers/tests/system/microsoft/mssql/example_mssql.py similarity index 97% rename from tests/system/providers/microsoft/mssql/example_mssql.py rename to providers/tests/system/microsoft/mssql/example_mssql.py index 957a8a26c2980..12e2815c59d01 100644 --- a/tests/system/providers/microsoft/mssql/example_mssql.py +++ b/providers/tests/system/microsoft/mssql/example_mssql.py @@ -146,12 +146,12 @@ def insert_mssql_hook(): ) # [END mssql_operator_howto_guide] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/translate/__init__.py b/providers/tests/system/microsoft/winrm/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/translate/__init__.py rename to providers/tests/system/microsoft/winrm/__init__.py diff --git a/tests/system/providers/microsoft/winrm/example_winrm.py b/providers/tests/system/microsoft/winrm/example_winrm.py similarity index 95% rename from tests/system/providers/microsoft/winrm/example_winrm.py rename to providers/tests/system/microsoft/winrm/example_winrm.py index 60a0149dc235a..1de1cb8c0fd68 100644 --- a/tests/system/providers/microsoft/winrm/example_winrm.py +++ b/providers/tests/system/microsoft/winrm/example_winrm.py @@ -64,13 +64,13 @@ [t1, t2, t3] >> run_this_last - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/translate_speech/__init__.py b/providers/tests/system/mysql/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/translate_speech/__init__.py rename to 
providers/tests/system/mysql/__init__.py diff --git a/tests/system/providers/mysql/example_mysql.py b/providers/tests/system/mysql/example_mysql.py similarity index 95% rename from tests/system/providers/mysql/example_mysql.py rename to providers/tests/system/mysql/example_mysql.py index a890b7846ec95..6f0f884197f3a 100644 --- a/tests/system/providers/mysql/example_mysql.py +++ b/providers/tests/system/mysql/example_mysql.py @@ -59,7 +59,7 @@ drop_table_mysql_task >> mysql_task -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vertex_ai/__init__.py b/providers/tests/system/neo4j/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/vertex_ai/__init__.py rename to providers/tests/system/neo4j/__init__.py diff --git a/tests/system/providers/neo4j/example_neo4j.py b/providers/tests/system/neo4j/example_neo4j.py similarity index 95% rename from tests/system/providers/neo4j/example_neo4j.py rename to providers/tests/system/neo4j/example_neo4j.py index 0aea16f736dba..80db3fbb2ab1d 100644 --- a/tests/system/providers/neo4j/example_neo4j.py +++ b/providers/tests/system/neo4j/example_neo4j.py @@ -48,7 +48,7 @@ # [END run_query_neo4j_operator] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/video_intelligence/__init__.py b/providers/tests/system/openai/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/video_intelligence/__init__.py rename to providers/tests/system/openai/__init__.py diff --git a/tests/system/providers/openai/example_openai.py b/providers/tests/system/openai/example_openai.py similarity index 97% rename from tests/system/providers/openai/example_openai.py rename to providers/tests/system/openai/example_openai.py index ec8dd316b3ed0..d342207b5d09b 100644 --- a/tests/system/providers/openai/example_openai.py +++ b/providers/tests/system/openai/example_openai.py @@ -104,7 +104,7 @@ def task_to_store_input_text_in_xcom(): example_openai_dag() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/openai/example_trigger_batch_operator.py b/providers/tests/system/openai/example_trigger_batch_operator.py similarity index 97% rename from tests/system/providers/openai/example_trigger_batch_operator.py rename to providers/tests/system/openai/example_trigger_batch_operator.py index 6f01f648ccc7b..3dae1df5d9d0b 100644 --- a/tests/system/providers/openai/example_trigger_batch_operator.py +++ b/providers/tests/system/openai/example_trigger_batch_operator.py @@ -111,7 +111,7 @@ def cleanup_batch_output_file(batch_id, **context): openai_batch_chat_completions() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = 
get_test_run(dag) diff --git a/tests/system/providers/google/cloud/vision/__init__.py b/providers/tests/system/opensearch/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/vision/__init__.py rename to providers/tests/system/opensearch/__init__.py diff --git a/tests/system/providers/opensearch/example_opensearch.py b/providers/tests/system/opensearch/example_opensearch.py similarity index 96% rename from tests/system/providers/opensearch/example_opensearch.py rename to providers/tests/system/opensearch/example_opensearch.py index b82af8ebddf16..e339fbe788481 100644 --- a/tests/system/providers/opensearch/example_opensearch.py +++ b/providers/tests/system/opensearch/example_opensearch.py @@ -118,14 +118,14 @@ def load_connections(): chain(create_index, add_document_by_class, add_document_by_args, search_high_level, search_low_level) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/cloud/workflows/__init__.py b/providers/tests/system/opsgenie/__init__.py similarity index 100% rename from tests/system/providers/google/cloud/workflows/__init__.py rename to providers/tests/system/opsgenie/__init__.py diff --git a/tests/system/providers/opsgenie/example_opsgenie_alert.py b/providers/tests/system/opsgenie/example_opsgenie_alert.py similarity index 96% rename from tests/system/providers/opsgenie/example_opsgenie_alert.py rename to providers/tests/system/opsgenie/example_opsgenie_alert.py index 30fdabc0d5e95..cf11f824eec8c 100644 --- a/tests/system/providers/opsgenie/example_opsgenie_alert.py +++ b/providers/tests/system/opsgenie/example_opsgenie_alert.py @@ -51,7 +51,7 @@ ) # [END howto_opsgenie_delete_alert_operator] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/opsgenie/example_opsgenie_notifier.py b/providers/tests/system/opsgenie/example_opsgenie_notifier.py similarity index 96% rename from tests/system/providers/opsgenie/example_opsgenie_notifier.py rename to providers/tests/system/opsgenie/example_opsgenie_notifier.py index 10edf8debdaed..97f5945aa9772 100644 --- a/tests/system/providers/opsgenie/example_opsgenie_notifier.py +++ b/providers/tests/system/opsgenie/example_opsgenie_notifier.py @@ -37,7 +37,7 @@ ) # [END howto_notifier_opsgenie] -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/providers/teradata/transfers/__init__.py b/providers/tests/system/papermill/__init__.py similarity index 100% rename from tests/providers/teradata/transfers/__init__.py rename to providers/tests/system/papermill/__init__.py diff --git a/tests/system/providers/papermill/conftest.py b/providers/tests/system/papermill/conftest.py 
similarity index 100% rename from tests/system/providers/papermill/conftest.py rename to providers/tests/system/papermill/conftest.py diff --git a/tests/system/providers/papermill/example_papermill.py b/providers/tests/system/papermill/example_papermill.py similarity index 96% rename from tests/system/providers/papermill/example_papermill.py rename to providers/tests/system/papermill/example_papermill.py index 199fd6f64d289..529647fe4e719 100644 --- a/tests/system/providers/papermill/example_papermill.py +++ b/providers/tests/system/papermill/example_papermill.py @@ -52,7 +52,7 @@ ) # [END howto_operator_papermill] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/papermill/example_papermill_remote_verify.py b/providers/tests/system/papermill/example_papermill_remote_verify.py similarity index 97% rename from tests/system/providers/papermill/example_papermill_remote_verify.py rename to providers/tests/system/papermill/example_papermill_remote_verify.py index b4cf6249be241..f325928b8cd01 100644 --- a/tests/system/providers/papermill/example_papermill_remote_verify.py +++ b/providers/tests/system/papermill/example_papermill_remote_verify.py @@ -75,7 +75,7 @@ def check_notebook(output_notebook, execution_date): ) # [END howto_verify_operator_papermill_remote_kernel] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/papermill/example_papermill_verify.py b/providers/tests/system/papermill/example_papermill_verify.py similarity index 97% rename from tests/system/providers/papermill/example_papermill_verify.py rename to providers/tests/system/papermill/example_papermill_verify.py index af24dbdf8ff77..cfae2cc6ed3e0 100644 --- a/tests/system/providers/papermill/example_papermill_verify.py +++ b/providers/tests/system/papermill/example_papermill_verify.py @@ -73,7 +73,7 @@ def check_notebook(inlets, execution_date): run_this >> check_notebook(inlets=AUTO, execution_date="{{ execution_date }}") # [END howto_verify_operator_papermill] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/papermill/input_notebook.ipynb b/providers/tests/system/papermill/input_notebook.ipynb similarity index 100% rename from tests/system/providers/papermill/input_notebook.ipynb rename to providers/tests/system/papermill/input_notebook.ipynb diff --git a/tests/system/providers/google/datacatalog/__init__.py b/providers/tests/system/pgvector/__init__.py similarity index 100% rename from tests/system/providers/google/datacatalog/__init__.py rename to providers/tests/system/pgvector/__init__.py diff --git a/tests/system/providers/pgvector/example_pgvector.py b/providers/tests/system/pgvector/example_pgvector.py similarity index 97% rename from tests/system/providers/pgvector/example_pgvector.py rename to providers/tests/system/pgvector/example_pgvector.py index 8d1d0f0b3703e..5c16b9da307cb 100644 --- 
a/tests/system/providers/pgvector/example_pgvector.py +++ b/providers/tests/system/pgvector/example_pgvector.py @@ -76,7 +76,7 @@ def cleanup_postgres_objects(): example_pgvector_dag() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/pgvector/example_pgvector_openai.py b/providers/tests/system/pgvector/example_pgvector_openai.py similarity index 97% rename from tests/system/providers/pgvector/example_pgvector_openai.py rename to providers/tests/system/pgvector/example_pgvector_openai.py index 9b7999436f348..9c8f841e8f933 100644 --- a/tests/system/providers/pgvector/example_pgvector_openai.py +++ b/providers/tests/system/pgvector/example_pgvector_openai.py @@ -90,7 +90,7 @@ def cleanup_postgres_objects(): example_pgvector_dag() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/firebase/__init__.py b/providers/tests/system/pinecone/__init__.py similarity index 100% rename from tests/system/providers/google/firebase/__init__.py rename to providers/tests/system/pinecone/__init__.py diff --git a/tests/system/providers/pinecone/example_create_pod_index.py b/providers/tests/system/pinecone/example_create_pod_index.py similarity index 96% rename from tests/system/providers/pinecone/example_create_pod_index.py rename to providers/tests/system/pinecone/example_create_pod_index.py index a2d7f16c6934e..e0342e2d62008 100644 --- a/tests/system/providers/pinecone/example_create_pod_index.py +++ b/providers/tests/system/pinecone/example_create_pod_index.py @@ -56,7 +56,7 @@ def delete_index(): create_index >> delete_index() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/pinecone/example_create_serverless_index.py b/providers/tests/system/pinecone/example_create_serverless_index.py similarity index 96% rename from tests/system/providers/pinecone/example_create_serverless_index.py rename to providers/tests/system/pinecone/example_create_serverless_index.py index cf1e2c5cee4b5..28078b1b608cd 100644 --- a/tests/system/providers/pinecone/example_create_serverless_index.py +++ b/providers/tests/system/pinecone/example_create_serverless_index.py @@ -55,7 +55,7 @@ def delete_index(): create_index >> delete_index() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/pinecone/example_dag_pinecone.py b/providers/tests/system/pinecone/example_dag_pinecone.py similarity index 95% rename from tests/system/providers/pinecone/example_dag_pinecone.py rename to providers/tests/system/pinecone/example_dag_pinecone.py index 8bf59febeca93..744f6518dceae 100644 --- a/tests/system/providers/pinecone/example_dag_pinecone.py +++ 
b/providers/tests/system/pinecone/example_dag_pinecone.py @@ -46,7 +46,7 @@ # [END howto_operator_pinecone_ingest] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/pinecone/example_pinecone_cohere.py b/providers/tests/system/pinecone/example_pinecone_cohere.py similarity index 97% rename from tests/system/providers/pinecone/example_pinecone_cohere.py rename to providers/tests/system/pinecone/example_pinecone_cohere.py index 80e6766484d6b..80e9b1efad8eb 100644 --- a/tests/system/providers/pinecone/example_pinecone_cohere.py +++ b/providers/tests/system/pinecone/example_pinecone_cohere.py @@ -76,7 +76,7 @@ def delete_index(): create_index() >> embed_task >> transformed_output >> perform_ingestion >> delete_index() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/pinecone/example_pinecone_openai.py b/providers/tests/system/pinecone/example_pinecone_openai.py similarity index 98% rename from tests/system/providers/pinecone/example_pinecone_openai.py rename to providers/tests/system/pinecone/example_pinecone_openai.py index d338e25542ce0..dad83f48085a5 100644 --- a/tests/system/providers/pinecone/example_pinecone_openai.py +++ b/providers/tests/system/pinecone/example_pinecone_openai.py @@ -107,7 +107,7 @@ def delete_index(): create_index >> embed_task >> perform_ingestion >> delete_index() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/suite/__init__.py b/providers/tests/system/postgres/__init__.py similarity index 100% rename from tests/system/providers/google/suite/__init__.py rename to providers/tests/system/postgres/__init__.py diff --git a/tests/system/providers/postgres/example_postgres.py b/providers/tests/system/postgres/example_postgres.py similarity index 96% rename from tests/system/providers/postgres/example_postgres.py rename to providers/tests/system/postgres/example_postgres.py index aab5c1951799b..5ffaf6ea0098a 100644 --- a/tests/system/providers/postgres/example_postgres.py +++ b/providers/tests/system/postgres/example_postgres.py @@ -80,13 +80,13 @@ create_pet_table >> populate_pet_table >> get_all_pets >> get_birth_date # [END postgres_sql_execute_query_operator_howto_guide] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/suite/resources/__init__.py b/providers/tests/system/presto/__init__.py similarity index 100% rename from 
tests/system/providers/google/suite/resources/__init__.py rename to providers/tests/system/presto/__init__.py diff --git a/tests/system/providers/presto/example_gcs_to_presto.py b/providers/tests/system/presto/example_gcs_to_presto.py similarity index 95% rename from tests/system/providers/presto/example_gcs_to_presto.py rename to providers/tests/system/presto/example_gcs_to_presto.py index 19b4a3a4fba5e..ceccf098471dc 100644 --- a/tests/system/providers/presto/example_gcs_to_presto.py +++ b/providers/tests/system/presto/example_gcs_to_presto.py @@ -50,7 +50,7 @@ # [END gcs_csv_to_presto_table] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/google/workplace/__init__.py b/providers/tests/system/qdrant/__init__.py similarity index 100% rename from tests/system/providers/google/workplace/__init__.py rename to providers/tests/system/qdrant/__init__.py diff --git a/tests/system/providers/qdrant/example_dag_qdrant.py b/providers/tests/system/qdrant/example_dag_qdrant.py similarity index 96% rename from tests/system/providers/qdrant/example_dag_qdrant.py rename to providers/tests/system/qdrant/example_dag_qdrant.py index 8f55d2d72c559..85c92d9c99312 100644 --- a/tests/system/providers/qdrant/example_dag_qdrant.py +++ b/providers/tests/system/qdrant/example_dag_qdrant.py @@ -43,7 +43,7 @@ # [END howto_operator_qdrant_ingest] -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/http/__init__.py b/providers/tests/system/redis/__init__.py similarity index 100% rename from tests/system/providers/http/__init__.py rename to providers/tests/system/redis/__init__.py diff --git a/tests/system/providers/redis/example_redis_publish.py b/providers/tests/system/redis/example_redis_publish.py similarity index 95% rename from tests/system/providers/redis/example_redis_publish.py rename to providers/tests/system/redis/example_redis_publish.py index 9d50593c04003..335256f68e57d 100644 --- a/tests/system/providers/redis/example_redis_publish.py +++ b/providers/tests/system/redis/example_redis_publish.py @@ -73,13 +73,13 @@ publish_task >> key_sensor_task - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/influxdb/__init__.py b/providers/tests/system/salesforce/__init__.py similarity index 100% rename from tests/system/providers/influxdb/__init__.py rename to providers/tests/system/salesforce/__init__.py diff --git a/tests/system/providers/salesforce/example_bulk.py b/providers/tests/system/salesforce/example_bulk.py similarity index 97% rename from tests/system/providers/salesforce/example_bulk.py rename to providers/tests/system/salesforce/example_bulk.py index 
06764aaf54b42..7d43de648805d 100644 --- a/tests/system/providers/salesforce/example_bulk.py +++ b/providers/tests/system/salesforce/example_bulk.py @@ -90,7 +90,7 @@ # [END howto_salesforce_bulk_delete_operation] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/salesforce/example_salesforce_apex_rest.py b/providers/tests/system/salesforce/example_salesforce_apex_rest.py similarity index 95% rename from tests/system/providers/salesforce/example_salesforce_apex_rest.py rename to providers/tests/system/salesforce/example_salesforce_apex_rest.py index 9bfdb94839621..2961ffa4e4784 100644 --- a/tests/system/providers/salesforce/example_salesforce_apex_rest.py +++ b/providers/tests/system/salesforce/example_salesforce_apex_rest.py @@ -41,7 +41,7 @@ # [END howto_salesforce_apex_rest_operator] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/jdbc/__init__.py b/providers/tests/system/samba/__init__.py similarity index 100% rename from tests/system/providers/jdbc/__init__.py rename to providers/tests/system/samba/__init__.py diff --git a/tests/system/providers/samba/example_gcs_to_samba.py b/providers/tests/system/samba/example_gcs_to_samba.py similarity index 96% rename from tests/system/providers/samba/example_gcs_to_samba.py rename to providers/tests/system/samba/example_gcs_to_samba.py index c03a6f129ceae..d7717fcbfc727 100644 --- a/tests/system/providers/samba/example_gcs_to_samba.py +++ b/providers/tests/system/samba/example_gcs_to_samba.py @@ -30,7 +30,8 @@ from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator from airflow.providers.samba.transfers.gcs_to_samba import GCSToSambaOperator from airflow.utils.trigger_rule import TriggerRule -from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID + +from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID") PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID @@ -137,14 +138,14 @@ >> delete_bucket ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/jenkins/__init__.py b/providers/tests/system/sftp/__init__.py similarity index 100% rename from tests/system/providers/jenkins/__init__.py rename to providers/tests/system/sftp/__init__.py diff --git a/tests/system/providers/sftp/example_sftp_sensor.py b/providers/tests/system/sftp/example_sftp_sensor.py similarity index 96% rename from tests/system/providers/sftp/example_sftp_sensor.py rename to providers/tests/system/sftp/example_sftp_sensor.py index 
df705b4b871f9..a3bde8a03af0a 100644 --- a/tests/system/providers/sftp/example_sftp_sensor.py +++ b/providers/tests/system/sftp/example_sftp_sensor.py @@ -89,14 +89,14 @@ def sftp_sensor_decorator(): >> remove_file_task_end ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/mssql/__init__.py b/providers/tests/system/singularity/__init__.py similarity index 100% rename from tests/system/providers/microsoft/mssql/__init__.py rename to providers/tests/system/singularity/__init__.py diff --git a/tests/system/providers/singularity/example_singularity.py b/providers/tests/system/singularity/example_singularity.py similarity index 95% rename from tests/system/providers/singularity/example_singularity.py rename to providers/tests/system/singularity/example_singularity.py index 4b60c080dcd2c..e80a0f40eb955 100644 --- a/tests/system/providers/singularity/example_singularity.py +++ b/providers/tests/system/singularity/example_singularity.py @@ -50,7 +50,7 @@ t3 >> t4 -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/winrm/__init__.py b/providers/tests/system/slack/__init__.py similarity index 100% rename from tests/system/providers/microsoft/winrm/__init__.py rename to providers/tests/system/slack/__init__.py diff --git a/tests/system/providers/slack/example_slack.py b/providers/tests/system/slack/example_slack.py similarity index 97% rename from tests/system/providers/slack/example_slack.py rename to providers/tests/system/slack/example_slack.py index c20cf8afa8150..f122dffca2782 100644 --- a/tests/system/providers/slack/example_slack.py +++ b/providers/tests/system/slack/example_slack.py @@ -94,7 +94,7 @@ ) -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/slack/example_slack_webhook.py b/providers/tests/system/slack/example_slack_webhook.py similarity index 97% rename from tests/system/providers/slack/example_slack_webhook.py rename to providers/tests/system/slack/example_slack_webhook.py index 98905d1b9ee67..7e45dc92d921f 100644 --- a/tests/system/providers/slack/example_slack_webhook.py +++ b/providers/tests/system/slack/example_slack_webhook.py @@ -69,7 +69,7 @@ slack_webhook_operator_text >> slack_webhook_operator_blocks -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/slack/example_sql_to_slack.py b/providers/tests/system/slack/example_sql_to_slack.py similarity 
index 96% rename from tests/system/providers/slack/example_sql_to_slack.py rename to providers/tests/system/slack/example_sql_to_slack.py index 2e672df9aa686..457bcecaea671 100644 --- a/tests/system/providers/slack/example_sql_to_slack.py +++ b/providers/tests/system/slack/example_sql_to_slack.py @@ -52,7 +52,7 @@ # [END howto_operator_sql_to_slack_api_file] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/slack/example_sql_to_slack_webhook.py b/providers/tests/system/slack/example_sql_to_slack_webhook.py similarity index 95% rename from tests/system/providers/slack/example_sql_to_slack_webhook.py rename to providers/tests/system/slack/example_sql_to_slack_webhook.py index 97bb8ae46ed2d..44fad156657be 100644 --- a/tests/system/providers/slack/example_sql_to_slack_webhook.py +++ b/providers/tests/system/slack/example_sql_to_slack_webhook.py @@ -51,7 +51,7 @@ # [END howto_operator_sql_to_slack_webhook] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/mysql/__init__.py b/providers/tests/system/snowflake/__init__.py similarity index 100% rename from tests/system/providers/mysql/__init__.py rename to providers/tests/system/snowflake/__init__.py diff --git a/tests/system/providers/snowflake/example_copy_into_snowflake.py b/providers/tests/system/snowflake/example_copy_into_snowflake.py similarity index 96% rename from tests/system/providers/snowflake/example_copy_into_snowflake.py rename to providers/tests/system/snowflake/example_copy_into_snowflake.py index e25112b95f3e4..32eb9699d74b6 100644 --- a/tests/system/providers/snowflake/example_copy_into_snowflake.py +++ b/providers/tests/system/snowflake/example_copy_into_snowflake.py @@ -60,7 +60,7 @@ # [END howto_operator_s3_copy_into_snowflake] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/snowflake/example_snowflake.py b/providers/tests/system/snowflake/example_snowflake.py similarity index 97% rename from tests/system/providers/snowflake/example_snowflake.py rename to providers/tests/system/snowflake/example_snowflake.py index 1a6f12080d74e..e851b13c08200 100644 --- a/tests/system/providers/snowflake/example_snowflake.py +++ b/providers/tests/system/snowflake/example_snowflake.py @@ -96,7 +96,7 @@ ) -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/snowflake/example_snowflake_snowflake_op_template_file.sql b/providers/tests/system/snowflake/example_snowflake_snowflake_op_template_file.sql similarity index 100% rename from tests/system/providers/snowflake/example_snowflake_snowflake_op_template_file.sql rename to 
providers/tests/system/snowflake/example_snowflake_snowflake_op_template_file.sql diff --git a/tests/system/providers/snowflake/example_snowpark_decorator.py b/providers/tests/system/snowflake/example_snowpark_decorator.py similarity index 97% rename from tests/system/providers/snowflake/example_snowpark_decorator.py rename to providers/tests/system/snowflake/example_snowpark_decorator.py index 1a303b1fdfa5f..3f74b97d0f23e 100644 --- a/tests/system/providers/snowflake/example_snowpark_decorator.py +++ b/providers/tests/system/snowflake/example_snowpark_decorator.py @@ -79,7 +79,8 @@ def check_num_rows(table_name: str): check_num_rows(table_name) # [END howto_decorator_snowpark] -from tests.system.utils import get_test_run # noqa: E402 + +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/snowflake/example_snowpark_operator.py b/providers/tests/system/snowflake/example_snowpark_operator.py similarity index 97% rename from tests/system/providers/snowflake/example_snowpark_operator.py rename to providers/tests/system/snowflake/example_snowpark_operator.py index 090a0f53a4bde..0543596ec9b1f 100644 --- a/tests/system/providers/snowflake/example_snowpark_operator.py +++ b/providers/tests/system/snowflake/example_snowpark_operator.py @@ -88,7 +88,8 @@ def check_num_rows(table_name: str): setup_data_operator >> check_num_rows_operator # [END howto_operator_snowpark] -from tests.system.utils import get_test_run # noqa: E402 + +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/neo4j/__init__.py b/providers/tests/system/sqlite/__init__.py similarity index 100% rename from tests/system/providers/neo4j/__init__.py rename to providers/tests/system/sqlite/__init__.py diff --git a/tests/system/providers/sqlite/create_table.sql b/providers/tests/system/sqlite/create_table.sql similarity index 100% rename from tests/system/providers/sqlite/create_table.sql rename to providers/tests/system/sqlite/create_table.sql diff --git a/tests/system/providers/sqlite/example_sqlite.py b/providers/tests/system/sqlite/example_sqlite.py similarity index 96% rename from tests/system/providers/sqlite/example_sqlite.py rename to providers/tests/system/sqlite/example_sqlite.py index 317da9af37808..1ea0270a0df27 100644 --- a/tests/system/providers/sqlite/example_sqlite.py +++ b/providers/tests/system/sqlite/example_sqlite.py @@ -91,13 +91,13 @@ def replace_sqlite_hook(): >> replace_sqlite_hook() ) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/openai/__init__.py b/providers/tests/system/tableau/__init__.py similarity index 100% rename from tests/system/providers/openai/__init__.py rename to providers/tests/system/tableau/__init__.py diff --git 
a/tests/system/providers/tableau/example_tableau.py b/providers/tests/system/tableau/example_tableau.py similarity index 97% rename from tests/system/providers/tableau/example_tableau.py rename to providers/tests/system/tableau/example_tableau.py index c5f6995b62fe0..1472511a66c95 100644 --- a/tests/system/providers/tableau/example_tableau.py +++ b/providers/tests/system/tableau/example_tableau.py @@ -71,7 +71,7 @@ # task_refresh_workbook_non_blocking >> task_check_job_status -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/opensearch/__init__.py b/providers/tests/system/telegram/__init__.py similarity index 100% rename from tests/system/providers/opensearch/__init__.py rename to providers/tests/system/telegram/__init__.py diff --git a/tests/system/providers/telegram/example_telegram.py b/providers/tests/system/telegram/example_telegram.py similarity index 95% rename from tests/system/providers/telegram/example_telegram.py rename to providers/tests/system/telegram/example_telegram.py index 18d734f3c69e8..a7e9018cf85f1 100644 --- a/tests/system/providers/telegram/example_telegram.py +++ b/providers/tests/system/telegram/example_telegram.py @@ -45,7 +45,7 @@ # [END howto_operator_telegram] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/opsgenie/__init__.py b/providers/tests/system/teradata/__init__.py similarity index 100% rename from tests/system/providers/opsgenie/__init__.py rename to providers/tests/system/teradata/__init__.py diff --git a/tests/system/providers/teradata/create_ssl_table.sql b/providers/tests/system/teradata/create_ssl_table.sql similarity index 100% rename from tests/system/providers/teradata/create_ssl_table.sql rename to providers/tests/system/teradata/create_ssl_table.sql diff --git a/tests/system/providers/teradata/create_table.sql b/providers/tests/system/teradata/create_table.sql similarity index 100% rename from tests/system/providers/teradata/create_table.sql rename to providers/tests/system/teradata/create_table.sql diff --git a/tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py b/providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py similarity index 98% rename from tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py rename to providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py index bcb1dd2fe6c5b..26fd27554d5d1 100644 --- a/tests/system/providers/teradata/example_azure_blob_to_teradata_transfer.py +++ b/providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py @@ -205,13 +205,13 @@ ) # [END azure_blob_to_teradata_transfer_operator_howto_guide] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with 
pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/teradata/example_s3_to_teradata_transfer.py b/providers/tests/system/teradata/example_s3_to_teradata_transfer.py similarity index 98% rename from tests/system/providers/teradata/example_s3_to_teradata_transfer.py rename to providers/tests/system/teradata/example_s3_to_teradata_transfer.py index ae8b827c1efae..ab361990b21e4 100644 --- a/tests/system/providers/teradata/example_s3_to_teradata_transfer.py +++ b/providers/tests/system/teradata/example_s3_to_teradata_transfer.py @@ -206,13 +206,13 @@ ) # [END s3_to_teradata_transfer_operator_howto_guide] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/teradata/example_ssl_teradata.py b/providers/tests/system/teradata/example_ssl_teradata.py similarity index 97% rename from tests/system/providers/teradata/example_ssl_teradata.py rename to providers/tests/system/teradata/example_ssl_teradata.py index 1673bd791a79b..3836715a8e634 100644 --- a/tests/system/providers/teradata/example_ssl_teradata.py +++ b/providers/tests/system/teradata/example_ssl_teradata.py @@ -121,13 +121,13 @@ # [END teradata_operator_howto_guide] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/teradata/example_teradata.py b/providers/tests/system/teradata/example_teradata.py similarity index 97% rename from tests/system/providers/teradata/example_teradata.py rename to providers/tests/system/teradata/example_teradata.py index 1fd587cdf8f71..ae41effce7fec 100644 --- a/tests/system/providers/teradata/example_teradata.py +++ b/providers/tests/system/teradata/example_teradata.py @@ -164,13 +164,13 @@ # [END teradata_operator_howto_guide] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/teradata/example_teradata_call_sp.py b/providers/tests/system/teradata/example_teradata_call_sp.py similarity index 97% rename from tests/system/providers/teradata/example_teradata_call_sp.py rename to providers/tests/system/teradata/example_teradata_call_sp.py index 
98ce85fffdfdc..f48583d83ec55 100644 --- a/tests/system/providers/teradata/example_teradata_call_sp.py +++ b/providers/tests/system/teradata/example_teradata_call_sp.py @@ -162,13 +162,13 @@ # [END howto_teradata_operator_for_sp] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/teradata/example_teradata_compute_cluster.py b/providers/tests/system/teradata/example_teradata_compute_cluster.py similarity index 97% rename from tests/system/providers/teradata/example_teradata_compute_cluster.py rename to providers/tests/system/teradata/example_teradata_compute_cluster.py index 3fefe9858770a..210cde7816bc2 100644 --- a/tests/system/providers/teradata/example_teradata_compute_cluster.py +++ b/providers/tests/system/teradata/example_teradata_compute_cluster.py @@ -146,13 +146,13 @@ # [END teradata_vantage_lake_compute_cluster_howto_guide] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/teradata/example_teradata_to_teradata_transfer.py b/providers/tests/system/teradata/example_teradata_to_teradata_transfer.py similarity index 97% rename from tests/system/providers/teradata/example_teradata_to_teradata_transfer.py rename to providers/tests/system/teradata/example_teradata_to_teradata_transfer.py index ac2517a33f519..7f315202606fc 100644 --- a/tests/system/providers/teradata/example_teradata_to_teradata_transfer.py +++ b/providers/tests/system/teradata/example_teradata_to_teradata_transfer.py @@ -148,13 +148,13 @@ ) # [END teradata_to_teradata_transfer_operator_howto_guide] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/pgvector/__init__.py b/providers/tests/system/trino/__init__.py similarity index 100% rename from tests/system/providers/pgvector/__init__.py rename to providers/tests/system/trino/__init__.py diff --git a/tests/system/providers/trino/example_gcs_to_trino.py b/providers/tests/system/trino/example_gcs_to_trino.py similarity index 95% rename from tests/system/providers/trino/example_gcs_to_trino.py rename to providers/tests/system/trino/example_gcs_to_trino.py index 
e970e4d146718..e0249101eb1a4 100644 --- a/tests/system/providers/trino/example_gcs_to_trino.py +++ b/providers/tests/system/trino/example_gcs_to_trino.py @@ -50,7 +50,7 @@ # [END gcs_csv_to_trino_table] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/trino/example_trino.py b/providers/tests/system/trino/example_trino.py similarity index 97% rename from tests/system/providers/trino/example_trino.py rename to providers/tests/system/trino/example_trino.py index 91e1c9dfce6ad..d708d9b7b8fb7 100644 --- a/tests/system/providers/trino/example_trino.py +++ b/providers/tests/system/trino/example_trino.py @@ -91,7 +91,7 @@ # [END howto_operator_trino] -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/pinecone/__init__.py b/providers/tests/system/weaviate/__init__.py similarity index 100% rename from tests/system/providers/pinecone/__init__.py rename to providers/tests/system/weaviate/__init__.py diff --git a/tests/system/providers/weaviate/example_weaviate_cohere.py b/providers/tests/system/weaviate/example_weaviate_cohere.py similarity index 98% rename from tests/system/providers/weaviate/example_weaviate_cohere.py rename to providers/tests/system/weaviate/example_weaviate_cohere.py index d8662e464bd4b..1db5e2cccb72a 100644 --- a/tests/system/providers/weaviate/example_weaviate_cohere.py +++ b/providers/tests/system/weaviate/example_weaviate_cohere.py @@ -115,7 +115,7 @@ def delete_weaviate_collections(): example_weaviate_cohere() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/weaviate/example_weaviate_dynamic_mapping_dag.py b/providers/tests/system/weaviate/example_weaviate_dynamic_mapping_dag.py similarity index 97% rename from tests/system/providers/weaviate/example_weaviate_dynamic_mapping_dag.py rename to providers/tests/system/weaviate/example_weaviate_dynamic_mapping_dag.py index 5f998cc52e862..38c0168a9f615 100644 --- a/tests/system/providers/weaviate/example_weaviate_dynamic_mapping_dag.py +++ b/providers/tests/system/weaviate/example_weaviate_dynamic_mapping_dag.py @@ -92,7 +92,7 @@ def delete_weaviate_collection(collection_name): example_weaviate_dynamic_mapping_dag() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/weaviate/example_weaviate_openai.py b/providers/tests/system/weaviate/example_weaviate_openai.py similarity index 98% rename from tests/system/providers/weaviate/example_weaviate_openai.py rename to providers/tests/system/weaviate/example_weaviate_openai.py index adf20fd929f1b..edad9058f8738 100644 --- a/tests/system/providers/weaviate/example_weaviate_openai.py +++ 
b/providers/tests/system/weaviate/example_weaviate_openai.py @@ -123,7 +123,7 @@ def delete_weaviate_collection(): example_weaviate_openai() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/weaviate/example_weaviate_operator.py b/providers/tests/system/weaviate/example_weaviate_operator.py similarity index 99% rename from tests/system/providers/weaviate/example_weaviate_operator.py rename to providers/tests/system/weaviate/example_weaviate_operator.py index 081f2ef3903d7..309864ad64924 100644 --- a/tests/system/providers/weaviate/example_weaviate_operator.py +++ b/providers/tests/system/weaviate/example_weaviate_operator.py @@ -297,7 +297,7 @@ def delete_weaviate_docs_collection_without_vector(): example_weaviate_using_operator() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/weaviate/example_weaviate_using_hook.py b/providers/tests/system/weaviate/example_weaviate_using_hook.py similarity index 98% rename from tests/system/providers/weaviate/example_weaviate_using_hook.py rename to providers/tests/system/weaviate/example_weaviate_using_hook.py index 8ad7356465a48..601a222f94d4e 100644 --- a/tests/system/providers/weaviate/example_weaviate_using_hook.py +++ b/providers/tests/system/weaviate/example_weaviate_using_hook.py @@ -140,7 +140,7 @@ def delete_weaviate_collection_without_vector(): example_weaviate_dag_using_hook() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/weaviate/example_weaviate_vectorizer_dag.py b/providers/tests/system/weaviate/example_weaviate_vectorizer_dag.py similarity index 97% rename from tests/system/providers/weaviate/example_weaviate_vectorizer_dag.py rename to providers/tests/system/weaviate/example_weaviate_vectorizer_dag.py index 546148ad70ed2..f0306177bffd4 100644 --- a/tests/system/providers/weaviate/example_weaviate_vectorizer_dag.py +++ b/providers/tests/system/weaviate/example_weaviate_vectorizer_dag.py @@ -99,7 +99,7 @@ def delete_weaviate_collection(): example_weaviate_vectorizer_dag() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/weaviate/example_weaviate_without_vectorizer_dag.py b/providers/tests/system/weaviate/example_weaviate_without_vectorizer_dag.py similarity index 98% rename from tests/system/providers/weaviate/example_weaviate_without_vectorizer_dag.py rename to providers/tests/system/weaviate/example_weaviate_without_vectorizer_dag.py index c4062508e26d1..e2cf9e560e785 100644 --- a/tests/system/providers/weaviate/example_weaviate_without_vectorizer_dag.py +++ b/providers/tests/system/weaviate/example_weaviate_without_vectorizer_dag.py @@ -111,7 +111,7 @@ def 
delete_weaviate_collection(): example_weaviate_without_vectorizer_dag() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/weaviate/jeopardy_data_with_vectors.json b/providers/tests/system/weaviate/jeopardy_data_with_vectors.json similarity index 100% rename from tests/system/providers/weaviate/jeopardy_data_with_vectors.json rename to providers/tests/system/weaviate/jeopardy_data_with_vectors.json diff --git a/tests/system/providers/weaviate/jeopardy_data_without_vectors.json b/providers/tests/system/weaviate/jeopardy_data_without_vectors.json similarity index 100% rename from tests/system/providers/weaviate/jeopardy_data_without_vectors.json rename to providers/tests/system/weaviate/jeopardy_data_without_vectors.json diff --git a/tests/system/providers/weaviate/jeopardy_doc_data_without_vectors.json b/providers/tests/system/weaviate/jeopardy_doc_data_without_vectors.json similarity index 100% rename from tests/system/providers/weaviate/jeopardy_doc_data_without_vectors.json rename to providers/tests/system/weaviate/jeopardy_doc_data_without_vectors.json diff --git a/tests/system/providers/postgres/__init__.py b/providers/tests/system/yandex/__init__.py similarity index 100% rename from tests/system/providers/postgres/__init__.py rename to providers/tests/system/yandex/__init__.py diff --git a/tests/system/providers/yandex/example_yandexcloud.py b/providers/tests/system/yandex/example_yandexcloud.py similarity index 97% rename from tests/system/providers/yandex/example_yandexcloud.py rename to providers/tests/system/yandex/example_yandexcloud.py index ddebc46a3b50c..ab72adc14c051 100644 --- a/tests/system/providers/yandex/example_yandexcloud.py +++ b/providers/tests/system/yandex/example_yandexcloud.py @@ -32,7 +32,8 @@ from airflow import DAG from airflow.decorators import task from airflow.providers.yandex.hooks.yandex import YandexCloudBaseHook -from tests.system.utils import get_test_env_id + +from dev.tests_common.test_utils.system_tests import get_test_env_id ENV_ID = get_test_env_id() DAG_ID = "example_yandexcloud_hook" @@ -196,14 +197,14 @@ def delete_cluster( spark_job >> delete_task - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/yandex/example_yandexcloud_dataproc.py b/providers/tests/system/yandex/example_yandexcloud_dataproc.py similarity index 96% rename from tests/system/providers/yandex/example_yandexcloud_dataproc.py rename to providers/tests/system/yandex/example_yandexcloud_dataproc.py index 7ff4aa541dd54..d97e3d592ef88 100644 --- a/tests/system/providers/yandex/example_yandexcloud_dataproc.py +++ b/providers/tests/system/yandex/example_yandexcloud_dataproc.py @@ -31,7 +31,8 @@ # Name of the datacenter where Dataproc cluster will be created from airflow.utils.trigger_rule import TriggerRule -from tests.system.utils import 
get_test_env_id + +from dev.tests_common.test_utils.system_tests import get_test_env_id # should be filled with appropriate ids @@ -162,13 +163,13 @@ create_cluster >> create_mapreduce_job >> create_hive_query >> create_hive_query_from_file create_hive_query_from_file >> create_spark_job >> create_pyspark_job >> delete_cluster - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/yandex/example_yandexcloud_dataproc_lightweight.py b/providers/tests/system/yandex/example_yandexcloud_dataproc_lightweight.py similarity index 92% rename from tests/system/providers/yandex/example_yandexcloud_dataproc_lightweight.py rename to providers/tests/system/yandex/example_yandexcloud_dataproc_lightweight.py index 475bc789ec2b8..38401a1cdcd39 100644 --- a/tests/system/providers/yandex/example_yandexcloud_dataproc_lightweight.py +++ b/providers/tests/system/yandex/example_yandexcloud_dataproc_lightweight.py @@ -27,7 +27,8 @@ # Name of the datacenter where Dataproc cluster will be created from airflow.utils.trigger_rule import TriggerRule -from tests.system.utils import get_test_env_id + +from dev.tests_common.test_utils.system_tests import get_test_env_id # should be filled with appropriate ids @@ -70,13 +71,13 @@ ) create_spark_job >> delete_cluster - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/yandex/example_yandexcloud_yq.py b/providers/tests/system/yandex/example_yandexcloud_yq.py similarity index 88% rename from tests/system/providers/yandex/example_yandexcloud_yq.py rename to providers/tests/system/yandex/example_yandexcloud_yq.py index 0ebef685e24b7..02478825ed5e0 100644 --- a/tests/system/providers/yandex/example_yandexcloud_yq.py +++ b/providers/tests/system/yandex/example_yandexcloud_yq.py @@ -21,7 +21,8 @@ from airflow.models.dag import DAG from airflow.operators.empty import EmptyOperator from airflow.providers.yandex.operators.yq import YQExecuteQueryOperator -from tests.system.utils import get_test_env_id + +from dev.tests_common.test_utils.system_tests import get_test_env_id ENV_ID = get_test_env_id() DAG_ID = "example_yandexcloud_yq" @@ -39,13 +40,13 @@ yq_operator = YQExecuteQueryOperator(task_id="sample_query", sql="select 33 as d, 44 as t") yq_operator >> run_this_last - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from 
dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/presto/__init__.py b/providers/tests/system/ydb/__init__.py similarity index 100% rename from tests/system/providers/presto/__init__.py rename to providers/tests/system/ydb/__init__.py diff --git a/tests/system/providers/ydb/example_ydb.py b/providers/tests/system/ydb/example_ydb.py similarity index 97% rename from tests/system/providers/ydb/example_ydb.py rename to providers/tests/system/ydb/example_ydb.py index 39156328f241b..d7293cb0d1b56 100644 --- a/tests/system/providers/ydb/example_ydb.py +++ b/providers/tests/system/ydb/example_ydb.py @@ -119,13 +119,13 @@ def populate_pet_table_via_bulk_upsert(): ) # [END ydb_operator_howto_guide] - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/qdrant/__init__.py b/providers/tests/system/zendesk/__init__.py similarity index 100% rename from tests/system/providers/qdrant/__init__.py rename to providers/tests/system/zendesk/__init__.py diff --git a/tests/system/providers/zendesk/example_zendesk_custom_get.py b/providers/tests/system/zendesk/example_zendesk_custom_get.py similarity index 95% rename from tests/system/providers/zendesk/example_zendesk_custom_get.py rename to providers/tests/system/zendesk/example_zendesk_custom_get.py index 701a109fd1159..c38d10a26abd3 100644 --- a/tests/system/providers/zendesk/example_zendesk_custom_get.py +++ b/providers/tests/system/zendesk/example_zendesk_custom_get.py @@ -45,7 +45,7 @@ def fetch_organizations() -> list[dict]: fetch_organizations() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/redis/__init__.py b/providers/tests/tableau/__init__.py similarity index 100% rename from tests/system/providers/redis/__init__.py rename to providers/tests/tableau/__init__.py diff --git a/tests/providers/teradata/triggers/__init__.py b/providers/tests/tableau/hooks/__init__.py similarity index 100% rename from tests/providers/teradata/triggers/__init__.py rename to providers/tests/tableau/hooks/__init__.py diff --git a/tests/providers/tableau/hooks/test_tableau.py b/providers/tests/tableau/hooks/test_tableau.py similarity index 95% rename from tests/providers/tableau/hooks/test_tableau.py rename to providers/tests/tableau/hooks/test_tableau.py index 8d10ce2fc368c..23d5e0188f609 100644 --- a/tests/providers/tableau/hooks/test_tableau.py +++ b/providers/tests/tableau/hooks/test_tableau.py @@ -115,10 +115,13 @@ def test_get_conn_auth_via_token_and_site_in_init(self, mock_server, mock_tablea """ Test get conn auth via token """ - with pytest.warns( - AirflowProviderDeprecationWarning, - match="Authentication via personal access token is deprecated..*", - ), 
TableauHook(site_id="test", tableau_conn_id="tableau_test_token") as tableau_hook: + with ( + pytest.warns( + AirflowProviderDeprecationWarning, + match="Authentication via personal access token is deprecated..*", + ), + TableauHook(site_id="test", tableau_conn_id="tableau_test_token") as tableau_hook, + ): mock_server.assert_called_once_with(tableau_hook.conn.host) mock_tableau_auth.assert_called_once_with( token_name=tableau_hook.conn.extra_dejson["token_name"], @@ -158,10 +161,13 @@ def test_get_conn_ssl_default(self, mock_server, mock_tableau_auth): """ Test get conn with default SSL parameters """ - with pytest.warns( - AirflowProviderDeprecationWarning, - match="Authentication via personal access token is deprecated..*", - ), TableauHook(tableau_conn_id="tableau_test_ssl_connection_default") as tableau_hook: + with ( + pytest.warns( + AirflowProviderDeprecationWarning, + match="Authentication via personal access token is deprecated..*", + ), + TableauHook(tableau_conn_id="tableau_test_ssl_connection_default") as tableau_hook, + ): mock_server.assert_called_once_with(tableau_hook.conn.host) mock_server.return_value.add_http_options.assert_called_once_with( options_dict={"verify": True, "cert": None} diff --git a/tests/system/providers/salesforce/__init__.py b/providers/tests/tableau/operators/__init__.py similarity index 100% rename from tests/system/providers/salesforce/__init__.py rename to providers/tests/tableau/operators/__init__.py diff --git a/tests/providers/tableau/operators/test_tableau.py b/providers/tests/tableau/operators/test_tableau.py similarity index 100% rename from tests/providers/tableau/operators/test_tableau.py rename to providers/tests/tableau/operators/test_tableau.py diff --git a/tests/system/providers/samba/__init__.py b/providers/tests/tableau/sensors/__init__.py similarity index 100% rename from tests/system/providers/samba/__init__.py rename to providers/tests/tableau/sensors/__init__.py diff --git a/tests/providers/tableau/sensors/test_tableau.py b/providers/tests/tableau/sensors/test_tableau.py similarity index 100% rename from tests/providers/tableau/sensors/test_tableau.py rename to providers/tests/tableau/sensors/test_tableau.py diff --git a/tests/providers/teradata/utils/__init__.py b/providers/tests/telegram/__init__.py similarity index 100% rename from tests/providers/teradata/utils/__init__.py rename to providers/tests/telegram/__init__.py diff --git a/tests/providers/trino/__init__.py b/providers/tests/telegram/hooks/__init__.py similarity index 100% rename from tests/providers/trino/__init__.py rename to providers/tests/telegram/hooks/__init__.py diff --git a/tests/providers/telegram/hooks/test_telegram.py b/providers/tests/telegram/hooks/test_telegram.py similarity index 100% rename from tests/providers/telegram/hooks/test_telegram.py rename to providers/tests/telegram/hooks/test_telegram.py diff --git a/tests/providers/trino/hooks/__init__.py b/providers/tests/telegram/operators/__init__.py similarity index 100% rename from tests/providers/trino/hooks/__init__.py rename to providers/tests/telegram/operators/__init__.py diff --git a/tests/providers/telegram/operators/test_telegram.py b/providers/tests/telegram/operators/test_telegram.py similarity index 100% rename from tests/providers/telegram/operators/test_telegram.py rename to providers/tests/telegram/operators/test_telegram.py diff --git a/tests/system/providers/sftp/__init__.py b/providers/tests/teradata/__init__.py similarity index 100% rename from tests/system/providers/sftp/__init__.py 
rename to providers/tests/teradata/__init__.py diff --git a/tests/providers/trino/transfers/__init__.py b/providers/tests/teradata/hooks/__init__.py similarity index 100% rename from tests/providers/trino/transfers/__init__.py rename to providers/tests/teradata/hooks/__init__.py diff --git a/tests/providers/teradata/hooks/test_teradata.py b/providers/tests/teradata/hooks/test_teradata.py similarity index 98% rename from tests/providers/teradata/hooks/test_teradata.py rename to providers/tests/teradata/hooks/test_teradata.py index 75367e4d406eb..21a19308b2da7 100644 --- a/tests/providers/teradata/hooks/test_teradata.py +++ b/providers/tests/teradata/hooks/test_teradata.py @@ -275,9 +275,12 @@ def test_bulk_insert_rows_without_fields(self): def test_bulk_insert_rows_no_rows(self): rows = [] - with pytest.raises(ValueError), pytest.warns( - AirflowProviderDeprecationWarning, - match="bulk_insert_rows is deprecated. Please use the insert_rows method instead.", + with ( + pytest.raises(ValueError), + pytest.warns( + AirflowProviderDeprecationWarning, + match="bulk_insert_rows is deprecated. Please use the insert_rows method instead.", + ), ): self.test_db_hook.bulk_insert_rows("table", rows) diff --git a/tests/providers/vertica/__init__.py b/providers/tests/teradata/operators/__init__.py similarity index 100% rename from tests/providers/vertica/__init__.py rename to providers/tests/teradata/operators/__init__.py diff --git a/tests/providers/teradata/operators/test_teradata.py b/providers/tests/teradata/operators/test_teradata.py similarity index 100% rename from tests/providers/teradata/operators/test_teradata.py rename to providers/tests/teradata/operators/test_teradata.py diff --git a/tests/providers/teradata/operators/test_teradata_compute_cluster.py b/providers/tests/teradata/operators/test_teradata_compute_cluster.py similarity index 100% rename from tests/providers/teradata/operators/test_teradata_compute_cluster.py rename to providers/tests/teradata/operators/test_teradata_compute_cluster.py diff --git a/tests/providers/vertica/hooks/__init__.py b/providers/tests/teradata/transfers/__init__.py similarity index 100% rename from tests/providers/vertica/hooks/__init__.py rename to providers/tests/teradata/transfers/__init__.py diff --git a/tests/providers/teradata/transfers/test_azure_blob_to_teradata.py b/providers/tests/teradata/transfers/test_azure_blob_to_teradata.py similarity index 100% rename from tests/providers/teradata/transfers/test_azure_blob_to_teradata.py rename to providers/tests/teradata/transfers/test_azure_blob_to_teradata.py diff --git a/tests/providers/teradata/transfers/test_s3_to_teradata.py b/providers/tests/teradata/transfers/test_s3_to_teradata.py similarity index 100% rename from tests/providers/teradata/transfers/test_s3_to_teradata.py rename to providers/tests/teradata/transfers/test_s3_to_teradata.py diff --git a/tests/providers/teradata/transfers/test_teradata_to_teradata.py b/providers/tests/teradata/transfers/test_teradata_to_teradata.py similarity index 100% rename from tests/providers/teradata/transfers/test_teradata_to_teradata.py rename to providers/tests/teradata/transfers/test_teradata_to_teradata.py diff --git a/tests/providers/vertica/operators/__init__.py b/providers/tests/teradata/triggers/__init__.py similarity index 100% rename from tests/providers/vertica/operators/__init__.py rename to providers/tests/teradata/triggers/__init__.py diff --git a/tests/providers/teradata/triggers/test_teradata_compute_cluster.py 
b/providers/tests/teradata/triggers/test_teradata_compute_cluster.py similarity index 100% rename from tests/providers/teradata/triggers/test_teradata_compute_cluster.py rename to providers/tests/teradata/triggers/test_teradata_compute_cluster.py diff --git a/tests/system/providers/__init__.py b/providers/tests/teradata/utils/__init__.py similarity index 100% rename from tests/system/providers/__init__.py rename to providers/tests/teradata/utils/__init__.py diff --git a/tests/providers/teradata/utils/test_constants.py b/providers/tests/teradata/utils/test_constants.py similarity index 100% rename from tests/providers/teradata/utils/test_constants.py rename to providers/tests/teradata/utils/test_constants.py diff --git a/tests/system/providers/amazon/__init__.py b/providers/tests/trino/__init__.py similarity index 100% rename from tests/system/providers/amazon/__init__.py rename to providers/tests/trino/__init__.py diff --git a/tests/system/providers/singularity/__init__.py b/providers/tests/trino/assets/__init__.py similarity index 100% rename from tests/system/providers/singularity/__init__.py rename to providers/tests/trino/assets/__init__.py diff --git a/tests/providers/trino/assets/test_trino.py b/providers/tests/trino/assets/test_trino.py similarity index 100% rename from tests/providers/trino/assets/test_trino.py rename to providers/tests/trino/assets/test_trino.py diff --git a/tests/system/providers/amazon/aws/__init__.py b/providers/tests/trino/hooks/__init__.py similarity index 100% rename from tests/system/providers/amazon/aws/__init__.py rename to providers/tests/trino/hooks/__init__.py diff --git a/tests/providers/trino/hooks/test_trino.py b/providers/tests/trino/hooks/test_trino.py similarity index 100% rename from tests/providers/trino/hooks/test_trino.py rename to providers/tests/trino/hooks/test_trino.py diff --git a/tests/system/providers/slack/__init__.py b/providers/tests/trino/operators/__init__.py similarity index 100% rename from tests/system/providers/slack/__init__.py rename to providers/tests/trino/operators/__init__.py diff --git a/tests/providers/trino/operators/test_trino.py b/providers/tests/trino/operators/test_trino.py similarity index 100% rename from tests/providers/trino/operators/test_trino.py rename to providers/tests/trino/operators/test_trino.py diff --git a/tests/system/providers/apache/__init__.py b/providers/tests/trino/transfers/__init__.py similarity index 100% rename from tests/system/providers/apache/__init__.py rename to providers/tests/trino/transfers/__init__.py diff --git a/tests/providers/trino/transfers/test_gcs_to_trino.py b/providers/tests/trino/transfers/test_gcs_to_trino.py similarity index 100% rename from tests/providers/trino/transfers/test_gcs_to_trino.py rename to providers/tests/trino/transfers/test_gcs_to_trino.py diff --git a/tests/system/providers/apache/beam/__init__.py b/providers/tests/vertica/__init__.py similarity index 100% rename from tests/system/providers/apache/beam/__init__.py rename to providers/tests/vertica/__init__.py diff --git a/tests/system/providers/apache/hive/__init__.py b/providers/tests/vertica/hooks/__init__.py similarity index 100% rename from tests/system/providers/apache/hive/__init__.py rename to providers/tests/vertica/hooks/__init__.py diff --git a/tests/providers/vertica/hooks/test_vertica.py b/providers/tests/vertica/hooks/test_vertica.py similarity index 100% rename from tests/providers/vertica/hooks/test_vertica.py rename to providers/tests/vertica/hooks/test_vertica.py diff --git 
a/tests/system/providers/apache/iceberg/__init__.py b/providers/tests/vertica/operators/__init__.py similarity index 100% rename from tests/system/providers/apache/iceberg/__init__.py rename to providers/tests/vertica/operators/__init__.py diff --git a/tests/providers/vertica/operators/test_vertica.py b/providers/tests/vertica/operators/test_vertica.py similarity index 100% rename from tests/providers/vertica/operators/test_vertica.py rename to providers/tests/vertica/operators/test_vertica.py diff --git a/tests/system/providers/snowflake/__init__.py b/providers/tests/weaviate/__init__.py similarity index 100% rename from tests/system/providers/snowflake/__init__.py rename to providers/tests/weaviate/__init__.py diff --git a/tests/system/providers/sqlite/__init__.py b/providers/tests/weaviate/hooks/__init__.py similarity index 100% rename from tests/system/providers/sqlite/__init__.py rename to providers/tests/weaviate/hooks/__init__.py diff --git a/tests/providers/weaviate/hooks/test_weaviate.py b/providers/tests/weaviate/hooks/test_weaviate.py similarity index 100% rename from tests/providers/weaviate/hooks/test_weaviate.py rename to providers/tests/weaviate/hooks/test_weaviate.py diff --git a/tests/system/providers/tableau/__init__.py b/providers/tests/weaviate/operators/__init__.py similarity index 100% rename from tests/system/providers/tableau/__init__.py rename to providers/tests/weaviate/operators/__init__.py diff --git a/tests/providers/weaviate/operators/test_weaviate.py b/providers/tests/weaviate/operators/test_weaviate.py similarity index 100% rename from tests/providers/weaviate/operators/test_weaviate.py rename to providers/tests/weaviate/operators/test_weaviate.py diff --git a/tests/system/providers/telegram/__init__.py b/providers/tests/yandex/__init__.py similarity index 100% rename from tests/system/providers/telegram/__init__.py rename to providers/tests/yandex/__init__.py diff --git a/tests/system/providers/teradata/__init__.py b/providers/tests/yandex/hooks/__init__.py similarity index 100% rename from tests/system/providers/teradata/__init__.py rename to providers/tests/yandex/hooks/__init__.py diff --git a/tests/providers/yandex/hooks/test_dataproc.py b/providers/tests/yandex/hooks/test_dataproc.py similarity index 100% rename from tests/providers/yandex/hooks/test_dataproc.py rename to providers/tests/yandex/hooks/test_dataproc.py diff --git a/tests/providers/yandex/hooks/test_yandex.py b/providers/tests/yandex/hooks/test_yandex.py similarity index 91% rename from tests/providers/yandex/hooks/test_yandex.py rename to providers/tests/yandex/hooks/test_yandex.py index e3eb36e2cfc8b..fbf77115053b2 100644 --- a/tests/providers/yandex/hooks/test_yandex.py +++ b/providers/tests/yandex/hooks/test_yandex.py @@ -27,7 +27,8 @@ from airflow.exceptions import AirflowProviderDeprecationWarning from airflow.providers.yandex.hooks.yandex import YandexCloudBaseHook -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars class TestYandexHook: @@ -62,9 +63,12 @@ def test_provider_user_agent(self, mock_get_credentials, mock_get_connection): sdk_prefix = "MyAirflow" hook = YandexCloudBaseHook() - with conf_vars({("yandex", "sdk_user_agent_prefix"): sdk_prefix}), pytest.warns( - AirflowProviderDeprecationWarning, - match="Using `provider_user_agent` in `YandexCloudBaseHook` is deprecated. 
Please use it in `utils.user_agent` instead.", + with ( + conf_vars({("yandex", "sdk_user_agent_prefix"): sdk_prefix}), + pytest.warns( + AirflowProviderDeprecationWarning, + match="Using `provider_user_agent` in `YandexCloudBaseHook` is deprecated. Please use it in `utils.user_agent` instead.", + ), ): assert hook.provider_user_agent().startswith(sdk_prefix) @@ -90,9 +94,12 @@ def test_sdk_user_agent(self, mock_get_credentials, mock_get_connection): ) @mock.patch("airflow.providers.yandex.utils.credentials.get_credentials", new=MagicMock()) def test_backcompat_prefix_works(self, uri): - with mock.patch.dict(os.environ, {"AIRFLOW_CONN_MY_CONN": uri}), pytest.warns( - AirflowProviderDeprecationWarning, - match="Using `connection_id` is deprecated. Please use `yandex_conn_id` parameter.", + with ( + mock.patch.dict(os.environ, {"AIRFLOW_CONN_MY_CONN": uri}), + pytest.warns( + AirflowProviderDeprecationWarning, + match="Using `connection_id` is deprecated. Please use `yandex_conn_id` parameter.", + ), ): hook = YandexCloudBaseHook("my_conn") assert hook.default_folder_id == "abc" diff --git a/tests/providers/yandex/hooks/test_yq.py b/providers/tests/yandex/hooks/test_yq.py similarity index 100% rename from tests/providers/yandex/hooks/test_yq.py rename to providers/tests/yandex/hooks/test_yq.py diff --git a/tests/system/providers/trino/__init__.py b/providers/tests/yandex/links/__init__.py similarity index 100% rename from tests/system/providers/trino/__init__.py rename to providers/tests/yandex/links/__init__.py diff --git a/tests/providers/yandex/links/test_yq.py b/providers/tests/yandex/links/test_yq.py similarity index 94% rename from tests/providers/yandex/links/test_yq.py rename to providers/tests/yandex/links/test_yq.py index d46862f1c737f..2ed720e7e141c 100644 --- a/tests/providers/yandex/links/test_yq.py +++ b/providers/tests/yandex/links/test_yq.py @@ -23,8 +23,9 @@ from airflow.models.taskinstance import TaskInstance from airflow.models.xcom import XCom from airflow.providers.yandex.links.yq import YQLink -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.mock_operators import MockOperator + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.mock_operators import MockOperator yandexcloud = pytest.importorskip("yandexcloud") diff --git a/tests/system/providers/weaviate/__init__.py b/providers/tests/yandex/operators/__init__.py similarity index 100% rename from tests/system/providers/weaviate/__init__.py rename to providers/tests/yandex/operators/__init__.py diff --git a/tests/providers/yandex/operators/test_dataproc.py b/providers/tests/yandex/operators/test_dataproc.py similarity index 100% rename from tests/providers/yandex/operators/test_dataproc.py rename to providers/tests/yandex/operators/test_dataproc.py diff --git a/tests/providers/yandex/operators/test_yq.py b/providers/tests/yandex/operators/test_yq.py similarity index 98% rename from tests/providers/yandex/operators/test_yq.py rename to providers/tests/yandex/operators/test_yq.py index 034f0505517ba..7e586e929eaff 100644 --- a/tests/providers/yandex/operators/test_yq.py +++ b/providers/tests/yandex/operators/test_yq.py @@ -22,7 +22,7 @@ import pytest -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS yandexcloud = pytest.importorskip("yandexcloud") diff --git a/tests/system/providers/yandex/__init__.py b/providers/tests/yandex/secrets/__init__.py similarity index 100% rename 
from tests/system/providers/yandex/__init__.py rename to providers/tests/yandex/secrets/__init__.py diff --git a/tests/providers/yandex/secrets/test_lockbox.py b/providers/tests/yandex/secrets/test_lockbox.py similarity index 100% rename from tests/providers/yandex/secrets/test_lockbox.py rename to providers/tests/yandex/secrets/test_lockbox.py diff --git a/tests/system/providers/ydb/__init__.py b/providers/tests/yandex/utils/__init__.py similarity index 100% rename from tests/system/providers/ydb/__init__.py rename to providers/tests/yandex/utils/__init__.py diff --git a/tests/providers/yandex/utils/test_credentials.py b/providers/tests/yandex/utils/test_credentials.py similarity index 100% rename from tests/providers/yandex/utils/test_credentials.py rename to providers/tests/yandex/utils/test_credentials.py diff --git a/tests/providers/yandex/utils/test_defaults.py b/providers/tests/yandex/utils/test_defaults.py similarity index 100% rename from tests/providers/yandex/utils/test_defaults.py rename to providers/tests/yandex/utils/test_defaults.py diff --git a/tests/providers/yandex/utils/test_fields.py b/providers/tests/yandex/utils/test_fields.py similarity index 100% rename from tests/providers/yandex/utils/test_fields.py rename to providers/tests/yandex/utils/test_fields.py diff --git a/tests/providers/yandex/utils/test_user_agent.py b/providers/tests/yandex/utils/test_user_agent.py similarity index 100% rename from tests/providers/yandex/utils/test_user_agent.py rename to providers/tests/yandex/utils/test_user_agent.py diff --git a/tests/system/providers/zendesk/__init__.py b/providers/tests/ydb/__init__.py similarity index 100% rename from tests/system/providers/zendesk/__init__.py rename to providers/tests/ydb/__init__.py diff --git a/tests/test_utils/operators/__init__.py b/providers/tests/ydb/hooks/__init__.py similarity index 100% rename from tests/test_utils/operators/__init__.py rename to providers/tests/ydb/hooks/__init__.py diff --git a/tests/providers/ydb/hooks/test_ydb.py b/providers/tests/ydb/hooks/test_ydb.py similarity index 100% rename from tests/providers/ydb/hooks/test_ydb.py rename to providers/tests/ydb/hooks/test_ydb.py diff --git a/tests/test_utils/perf/__init__.py b/providers/tests/ydb/operators/__init__.py similarity index 100% rename from tests/test_utils/perf/__init__.py rename to providers/tests/ydb/operators/__init__.py diff --git a/tests/providers/ydb/operators/test_ydb.py b/providers/tests/ydb/operators/test_ydb.py similarity index 100% rename from tests/providers/ydb/operators/test_ydb.py rename to providers/tests/ydb/operators/test_ydb.py diff --git a/tests/providers/ydb/utils/test_defaults.py b/providers/tests/ydb/utils/__init__.py similarity index 100% rename from tests/providers/ydb/utils/test_defaults.py rename to providers/tests/ydb/utils/__init__.py diff --git a/tests/providers/ydb/utils/test_credentials.py b/providers/tests/ydb/utils/test_credentials.py similarity index 100% rename from tests/providers/ydb/utils/test_credentials.py rename to providers/tests/ydb/utils/test_credentials.py diff --git a/tests/system/providers/google/leveldb/__init__.py b/providers/tests/ydb/utils/test_defaults.py similarity index 99% rename from tests/system/providers/google/leveldb/__init__.py rename to providers/tests/ydb/utils/test_defaults.py index 217e5db960782..13a83393a9124 100644 --- a/tests/system/providers/google/leveldb/__init__.py +++ b/providers/tests/ydb/utils/test_defaults.py @@ -1,4 +1,3 @@ -# # Licensed to the Apache Software Foundation (ASF) under 
one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information diff --git a/tests/system/providers/microsoft/__init__.py b/providers/tests/zendesk/__init__.py similarity index 99% rename from tests/system/providers/microsoft/__init__.py rename to providers/tests/zendesk/__init__.py index 217e5db960782..13a83393a9124 100644 --- a/tests/system/providers/microsoft/__init__.py +++ b/providers/tests/zendesk/__init__.py @@ -1,4 +1,3 @@ -# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information diff --git a/tests/system/providers/dbt/__init__.py b/providers/tests/zendesk/hooks/__init__.py similarity index 99% rename from tests/system/providers/dbt/__init__.py rename to providers/tests/zendesk/hooks/__init__.py index 217e5db960782..13a83393a9124 100644 --- a/tests/system/providers/dbt/__init__.py +++ b/providers/tests/zendesk/hooks/__init__.py @@ -1,4 +1,3 @@ -# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information diff --git a/tests/providers/zendesk/hooks/test_zendesk.py b/providers/tests/zendesk/hooks/test_zendesk.py similarity index 100% rename from tests/providers/zendesk/hooks/test_zendesk.py rename to providers/tests/zendesk/hooks/test_zendesk.py diff --git a/pyproject.toml b/pyproject.toml index bc5842fc51c68..038393b139a02 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -205,7 +205,6 @@ include = [ "/airflow/git_version" ] exclude = [ - "/airflow/providers/", "/airflow/www/node_modules/" ] artifacts = [ @@ -219,9 +218,6 @@ artifacts = [ include = [ "/airflow", ] -exclude = [ - "/airflow/providers/", -] artifacts = [ "/airflow/www/static/dist/", "/airflow/git_version" @@ -240,9 +236,6 @@ line-length = 110 extend-exclude = [ ".eggs", "*/_vendor/*", - # The files generated by stubgen aren't 100% valid syntax it turns out, and we don't ship them, so we can - # ignore them in ruff - "airflow/providers/common/sql/*/*.pyi", "tests/dags/test_imports.py", ] @@ -337,7 +330,21 @@ docstring-code-format = true required-imports = ["from __future__ import annotations"] combine-as-imports = true -[tool.ruff.lint.per-file-ignores] + +section-order = [ + "future", + "standard-library", + "third-party", + "first-party", + "local-folder", + "testing" +] + +# Make sure we put the "dev" imports at the end, not as a third-party module +[tool.ruff.lint.isort.sections] +testing = ["dev", "providers.tests"] + +[tool.ruff.lint.extend-per-file-ignores] "airflow/__init__.py" = ["F401", "TCH004"] "airflow/models/__init__.py" = ["F401", "TCH004"] "airflow/models/sqla_models.py" = ["F401"] @@ -370,6 +377,7 @@ combine-as-imports = true # TRY002: Use `raise Exception` "dev/perf/*" = ["TID253"] "dev/check_files.py" = ["S101"] +"dev/tests_common/*" = ["S101", "TRY002"] "dev/breeze/tests/*" = ["TID253", "S101", "TRY002"] "tests/*" = ["D", "TID253", "S101", "TRY002"] "docker_tests/*" = ["D", "TID253", "S101", "TRY002"] @@ -382,47 +390,12 @@ combine-as-imports = true "airflow/api/auth/backend/kerberos_auth.py" = ["E402"] "airflow/security/kerberos.py" = ["E402"] "airflow/security/utils.py" = ["E402"] -"tests/providers/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py" = ["E402"] -"tests/providers/common/io/xcom/test_backend.py" = ["E402"] 
-"tests/providers/elasticsearch/log/elasticmock/__init__.py" = ["E402"] -"tests/providers/elasticsearch/log/elasticmock/utilities/__init__.py" = ["E402"] -"tests/providers/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py" = ["E402"] -"tests/providers/google/cloud/hooks/vertex_ai/test_hyperparameter_tuning_job.py" = ["E402"] -"tests/providers/google/cloud/hooks/vertex_ai/test_auto_ml.py" = ["E402"] -"tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py" = ["E402"] -"tests/providers/google/cloud/hooks/vertex_ai/test_dataset.py" = ["E402"] -"tests/providers/google/cloud/hooks/vertex_ai/test_endpoint_service.py" = ["E402"] -"tests/providers/google/cloud/hooks/vertex_ai/test_generative_model.py" = ["E402"] -"tests/providers/google/cloud/hooks/vertex_ai/test_model_service.py" = ["E402"] -"tests/providers/google/cloud/hooks/vertex_ai/test_pipeline_job.py" = ["E402"] -"tests/providers/google/cloud/hooks/vertex_ai/test_prediction_service.py" = ["E402"] -"tests/providers/google/cloud/links/test_translate.py" = ["E402"] -"tests/providers/google/cloud/operators/test_automl.py"= ["E402"] -"tests/providers/google/cloud/operators/test_vertex_ai.py" = ["E402"] -"tests/providers/google/cloud/operators/vertex_ai/test_generative_model.py" = ["E402"] -"tests/providers/google/cloud/triggers/test_vertex_ai.py" = ["E402"] -"tests/providers/openai/hooks/test_openai.py" = ["E402"] -"tests/providers/openai/operators/test_openai.py" = ["E402"] -"tests/providers/openai/triggers/test_openai.py" = ["E402"] -"tests/providers/opensearch/conftest.py" = ["E402"] -"tests/providers/opensearch/hooks/test_opensearch.py" = ["E402"] -"tests/providers/opensearch/log/test_os_json_formatter.py" = ["E402"] -"tests/providers/opensearch/log/test_os_response.py" = ["E402"] -"tests/providers/opensearch/log/test_os_task_handler.py" = ["E402"] -"tests/providers/opensearch/operators/test_opensearch.py" = ["E402"] -"tests/providers/qdrant/hooks/test_qdrant.py" = ["E402"] -"tests/providers/qdrant/operators/test_qdrant.py" = ["E402"] -"tests/providers/snowflake/operators/test_snowflake_sql.py" = ["E402"] -"tests/providers/yandex/*/*.py" = ["E402"] # All the modules which do not follow B028 yet: https://docs.astral.sh/ruff/rules/no-explicit-stacklevel/ "helm_tests/airflow_aux/test_basic_helm_chart.py" = ["B028"] -# https://github.com/apache/airflow/issues/39252 -"airflow/providers/amazon/aws/hooks/eks.py" = ["W605"] - # Test compat imports banned imports to allow testing against older airflow versions -"tests/test_utils/compat.py" = ["TID251", "F401"] +"dev/tests_common/test_utils/compat.py" = ["TID251", "F401"] [tool.ruff.lint.flake8-tidy-imports] # Disallow all relative imports. 
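A note on the isort configuration in the hunk above: the custom `testing` section tells ruff to sort `dev` and `providers.tests` imports into their own group, placed last, instead of classifying `dev` as a third-party package. A minimal sketch of the resulting grouping in a test module (the particular imports are illustrative, not prescribed by this patch):

    from __future__ import annotations  # "future" section, always first

    import os  # standard library

    import pytest  # third party

    from airflow.models.dag import DAG  # first party

    from dev.tests_common.test_utils.config import conf_vars  # "testing" section, always last

This matches the import blocks in the test files touched by this patch, where the `dev.tests_common` imports consistently land in their own group after the `airflow` imports.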
@@ -512,11 +485,10 @@ norecursedirs = [
   "airflow",
   "tests/_internals",
   "tests/dags_with_system_exit",
-  "tests/test_utils",
   "tests/dags_corrupted",
   "tests/dags",
-  "tests/system/providers/google/cloud/dataproc/resources",
-  "tests/system/providers/google/cloud/gcs/resources",
+  "providers/tests/system/google/cloud/dataproc/resources",
+  "providers/tests/system/google/cloud/gcs/resources",
 ]
 log_level = "INFO"
 filterwarnings = [
@@ -576,9 +548,9 @@ omit = [
   "airflow/contrib/**",
   "airflow/example_dags/**",
   "airflow/migrations/**",
-  "airflow/providers/**/example_dags/**",
   "airflow/www/node_modules/**",
-  "airflow/providers/google/ads/_vendor/**",
+  "providers/src/airflow/providers/**/example_dags/**",
+  "providers/src/airflow/providers/google/ads/_vendor/**",
 ]

 [tool.coverage.report]
@@ -609,6 +581,16 @@ show_error_codes = true
 disable_error_code = [
     "annotation-unchecked",
 ]
+# Since there are no __init__.py files in
+# providers/src/airflow/providers we need to tell MyPy where the "base"
+# is, otherwise when it sees
+# providers/src/airflow/providers/redis/__init__.py, it thinks this is
+# the toplevel `redis` folder.
+explicit_package_bases = true
+mypy_path = [
+    "$MYPY_CONFIG_FILE_DIR",
+    "$MYPY_CONFIG_FILE_DIR/providers/src",
+]

 [[tool.mypy.overrides]]
 module="airflow.config_templates.default_webserver_config"
@@ -640,3 +622,16 @@ module=[
     "apache_beam.*"
 ]
 ignore_errors = true
+
+[tool.uv]
+dev-dependencies = [
+    "local-providers",
+]
+
+[tool.uv.sources]
+# These names must match the names as defined in the pyproject.toml of the workspace items,
+# *not* the workspace folder paths
+local-providers = { workspace = true }
+
+[tool.uv.workspace]
+members = ["providers"]
diff --git a/scripts/ci/docker-compose/local.yml b/scripts/ci/docker-compose/local.yml
index 2a55d8733c328..66a4736c5fd8d 100644
--- a/scripts/ci/docker-compose/local.yml
+++ b/scripts/ci/docker-compose/local.yml
@@ -85,6 +85,9 @@ services:
       - type: bind
         source: ../../../logs
         target: /root/airflow/logs
+      - type: bind
+        source: ../../../providers
+        target: /opt/airflow/providers
       - type: bind
         source: ../../../pyproject.toml
         target: /opt/airflow/pyproject.toml
diff --git a/scripts/ci/kubernetes/k8s_requirements.txt b/scripts/ci/kubernetes/k8s_requirements.txt
index 50e9399f5f221..1e587051f5845 100644
--- a/scripts/ci/kubernetes/k8s_requirements.txt
+++ b/scripts/ci/kubernetes/k8s_requirements.txt
@@ -1 +1,3 @@
--e .[devel-devscripts,devel-tests,cncf.kubernetes] --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.9.txt"
+--constraint https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.9.txt
+-e .[devel-devscripts,devel-tests,cncf.kubernetes]
+-e ./providers
diff --git a/scripts/ci/pre_commit/check_providers_subpackages_all_have_init.py b/scripts/ci/pre_commit/check_providers_subpackages_all_have_init.py
index da17f794eaeb6..af3afe0abc076 100755
--- a/scripts/ci/pre_commit/check_providers_subpackages_all_have_init.py
+++ b/scripts/ci/pre_commit/check_providers_subpackages_all_have_init.py
@@ -19,25 +19,27 @@
 import os
 import sys
-from glob import glob
 from pathlib import Path

-ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir))
-ACCEPTED_NON_INIT_DIRS = ["adr", "doc", "templates"]
+ROOT_DIR = Path(__file__).parents[3].resolve()
+ACCEPTED_NON_INIT_DIRS = ["adr", "doc", "templates", "__pycache__"]

-def check_dir_init_file(provider_files: list[str]) -> None:
+def check_dir_init_file(folders:
list[Path]) -> None: missing_init_dirs: list[Path] = [] - for candidate_path in provider_files: - if candidate_path.endswith("/__pycache__"): - continue - path = Path(candidate_path) - if path.is_dir() and not (path / "__init__.py").exists(): - if path.name not in ACCEPTED_NON_INIT_DIRS: - missing_init_dirs.append(path) + folders = list(folders) + for path in folders: + for root, dirs, files in os.walk(path): + # Edit it in place, so we don't recurse to folders we don't care about + dirs[:] = [d for d in dirs if d not in ACCEPTED_NON_INIT_DIRS] + + if "__init__.py" in files: + continue + + missing_init_dirs.append(Path(root)) if missing_init_dirs: - with open(os.path.join(ROOT_DIR, "scripts/ci/license-templates/LICENSE.txt")) as license: + with ROOT_DIR.joinpath("scripts/ci/license-templates/LICENSE.txt").open() as license: license_txt = license.readlines() prefixed_licensed_txt = [f"# {line}" if line != "\n" else "#\n" for line in license_txt] @@ -51,7 +53,11 @@ def check_dir_init_file(provider_files: list[str]) -> None: if __name__ == "__main__": - all_provider_subpackage_dirs = sorted(glob(f"{ROOT_DIR}/airflow/providers/**/*", recursive=True)) - check_dir_init_file(all_provider_subpackage_dirs) - all_test_provider_subpackage_dirs = sorted(glob(f"{ROOT_DIR}/tests/providers/**/*", recursive=True)) - check_dir_init_file(all_test_provider_subpackage_dirs) + providers_root = Path(f"{ROOT_DIR}/providers") + providers_ns = providers_root.joinpath("src", "airflow", "providers") + providers_tests = providers_root.joinpath("tests") + + providers_pkgs = sorted(map(lambda f: f.parent, providers_ns.rglob("provider.yaml"))) + check_dir_init_file(providers_pkgs) + + check_dir_init_file([providers_root / "tests"]) diff --git a/scripts/ci/pre_commit/check_system_tests.py b/scripts/ci/pre_commit/check_system_tests.py index 4c82272ad7875..c7c305460cb3e 100755 --- a/scripts/ci/pre_commit/check_system_tests.py +++ b/scripts/ci/pre_commit/check_system_tests.py @@ -37,13 +37,13 @@ WATCHER_APPEND_INSTRUCTION = "list(dag.tasks) >> watcher()" PYTEST_FUNCTION = """ -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) """ PYTEST_FUNCTION_PATTERN = re.compile( - r"from tests\.system\.utils import get_test_run(?: # noqa: E402)?\s+" + r"from dev\.tests_common\.test_utils\.system_tests import get_test_run(?: # noqa: E402)?\s+" r"(?:# .+\))?\s+" r"test_run = get_test_run\(dag\)" ) @@ -51,11 +51,11 @@ def _check_file(file: Path): content = file.read_text() - if "from tests.system.utils.watcher import watcher" in content: + if "from dev.tests_common.test_utils.watcher import watcher" in content: index = content.find(WATCHER_APPEND_INSTRUCTION) if index == -1: errors.append( - f"[red]The example {file} imports tests.system.utils.watcher " + f"[red]The example {file} imports dev.tests_common.test_utils.watcher " f"but does not use it properly![/]\n\n" "[yellow]Make sure you have:[/]\n\n" f" {WATCHER_APPEND_INSTRUCTION}\n\n" diff --git a/scripts/ci/pre_commit/check_system_tests_hidden_in_index.py b/scripts/ci/pre_commit/check_system_tests_hidden_in_index.py index fde6f38f45a9a..1c1fdb02c1793 100755 --- a/scripts/ci/pre_commit/check_system_tests_hidden_in_index.py +++ b/scripts/ci/pre_commit/check_system_tests_hidden_in_index.py @@ -54,10 +54,10 @@ def check_system_test_entry_hidden(provider_index: Path): :maxdepth: 1 
:caption: System tests - System Tests <_api/tests/system/providers/{provider_path}/index> + System Tests <_api/tests/system/{provider_path}/index> """ index_text = provider_index.read_text() - system_tests_path = AIRFLOW_SOURCES_ROOT / "tests" / "system" / "providers" / provider_path + system_tests_path = AIRFLOW_SOURCES_ROOT / "providers" / "tests" / "system" / provider_path index_text_manual = index_text.split( ".. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME!" )[0] diff --git a/scripts/ci/pre_commit/mypy_folder.py b/scripts/ci/pre_commit/mypy_folder.py index 2e50a3334f298..b2d7c76580276 100755 --- a/scripts/ci/pre_commit/mypy_folder.py +++ b/scripts/ci/pre_commit/mypy_folder.py @@ -31,7 +31,7 @@ initialize_breeze_precommit(__name__, __file__) -ALLOWED_FOLDERS = ["airflow", "airflow/providers", "dev", "docs"] +ALLOWED_FOLDERS = ["airflow", "providers/src/airflow/providers", "dev", "docs"] if len(sys.argv) < 2: console.print(f"[yellow]You need to specify the folder to test as parameter: {ALLOWED_FOLDERS}\n") @@ -43,12 +43,10 @@ sys.exit(1) arguments = [mypy_folder] -if mypy_folder == "airflow/providers": +if mypy_folder == "providers/src/airflow/providers": arguments.extend( [ - "tests/providers", - "tests/system/providers", - "tests/integration/providers", + "providers/tests", "--namespace-packages", ] ) @@ -57,14 +55,6 @@ arguments.extend( [ "tests", - "--exclude", - "airflow/providers", - "--exclude", - "tests/providers", - "--exclude", - "tests/system/providers", - "--exclude", - "tests/integration/providers", ] ) diff --git a/scripts/ci/pre_commit/update_common_sql_api_stubs.py b/scripts/ci/pre_commit/update_common_sql_api_stubs.py index 954302804e6f1..371c758146a2e 100755 --- a/scripts/ci/pre_commit/update_common_sql_api_stubs.py +++ b/scripts/ci/pre_commit/update_common_sql_api_stubs.py @@ -39,10 +39,12 @@ from common_precommit_black_utils import black_format from common_precommit_utils import AIRFLOW_SOURCES_ROOT_PATH -PROVIDERS_ROOT = (AIRFLOW_SOURCES_ROOT_PATH / "airflow" / "providers").resolve(strict=True) +PROVIDERS_ROOT = (AIRFLOW_SOURCES_ROOT_PATH / "providers" / "src" / "airflow" / "providers").resolve( + strict=True +) COMMON_SQL_ROOT = (PROVIDERS_ROOT / "common" / "sql").resolve(strict=True) OUT_DIR = AIRFLOW_SOURCES_ROOT_PATH / "out" -OUT_DIR_PROVIDERS = OUT_DIR / "airflow" / "providers" +OUT_DIR_PROVIDERS = OUT_DIR / PROVIDERS_ROOT.relative_to(AIRFLOW_SOURCES_ROOT_PATH) COMMON_SQL_PACKAGE_PREFIX = "airflow.providers.common.sql." 
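The `OUT_DIR_PROVIDERS` change above derives the stub output tree from `PROVIDERS_ROOT` instead of hard-coding `out/airflow/providers`, so the stubgen output mirrors the new `providers/src` layout. A minimal sketch of the path arithmetic, assuming a hypothetical checkout location:

    from pathlib import Path

    # Hypothetical checkout root, only for illustration
    AIRFLOW_SOURCES_ROOT_PATH = Path("/workspace/airflow")
    PROVIDERS_ROOT = AIRFLOW_SOURCES_ROOT_PATH / "providers" / "src" / "airflow" / "providers"
    OUT_DIR = AIRFLOW_SOURCES_ROOT_PATH / "out"

    # relative_to() strips the checkout prefix, so the output tree mirrors the source tree
    OUT_DIR_PROVIDERS = OUT_DIR / PROVIDERS_ROOT.relative_to(AIRFLOW_SOURCES_ROOT_PATH)
    print(OUT_DIR_PROVIDERS)  # /workspace/airflow/out/providers/src/airflow/providers

Generated stubs can then be compared file-for-file against the real `providers/src/airflow/providers` tree without any extra path translation.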
@@ -317,7 +319,7 @@ def compare_stub_files(generated_stub_path: Path, force_override: bool) -> tuple
     shutil.rmtree(OUT_DIR, ignore_errors=True)
     subprocess.run(
-        ["stubgen", *[os.fspath(path) for path in COMMON_SQL_ROOT.rglob("*.py")]],
+        ["stubgen", f"--out={ OUT_DIR }", COMMON_SQL_ROOT],
         cwd=AIRFLOW_SOURCES_ROOT_PATH,
     )
     total_removals, total_additions = 0, 0
diff --git a/scripts/ci/pre_commit/update_example_dags_paths.py b/scripts/ci/pre_commit/update_example_dags_paths.py
index 8b2c461ec8c1f..17d2a2ccea453 100755
--- a/scripts/ci/pre_commit/update_example_dags_paths.py
+++ b/scripts/ci/pre_commit/update_example_dags_paths.py
@@ -34,7 +34,7 @@
 console = Console(color_system="standard", width=200)

 AIRFLOW_SOURCES_ROOT = Path(__file__).parents[3].resolve()
-
+PROVIDERS_SRC = AIRFLOW_SOURCES_ROOT / "providers" / "src" / "airflow" / "providers"
 EXAMPLE_DAGS_URL_MATCHER = re.compile(
     r"^(.*)(https://github.com/apache/airflow/tree/(.*)/airflow/providers/(.*)/example_dags)(/?>.*)$"
 )
@@ -45,10 +45,7 @@ def get_provider_and_version(url_path: str) -> tuple[str, str]:
     candidate_folders = url_path.split("/")
     while candidate_folders:
         try:
-            with open(
-                (AIRFLOW_SOURCES_ROOT / "airflow" / "providers").joinpath(*candidate_folders)
-                / "provider.yaml"
-            ) as f:
+            with PROVIDERS_SRC.joinpath(*candidate_folders, "provider.yaml").open() as f:
                 provider_info = yaml.safe_load(f)
                 version = provider_info["versions"][0]
                 provider = "-".join(candidate_folders)
@@ -68,13 +65,11 @@ def replace_match(file: Path, line: str) -> str | None:
     if match:
         url_path_to_dir = match.group(4)
         folders = url_path_to_dir.split("/")
-        example_dags_folder = (AIRFLOW_SOURCES_ROOT / "airflow" / "providers").joinpath(
-            *folders
-        ) / "example_dags"
+        example_dags_folder = PROVIDERS_SRC.joinpath(*folders, "example_dags")
         provider, version = get_provider_and_version(url_path_to_dir)
         proper_system_tests_url = (
             f"https://github.com/apache/airflow/tree/providers-{provider}/{version}"
-            f"/tests/system/providers/{url_path_to_dir}"
+            f"/providers/tests/system/{url_path_to_dir}"
         )
         if not example_dags_folder.exists():
             if proper_system_tests_url in file.read_text():
diff --git a/scripts/ci/pre_commit/update_providers_dependencies.py b/scripts/ci/pre_commit/update_providers_dependencies.py
index 903a692384558..8a0e797d0ab6a 100755
--- a/scripts/ci/pre_commit/update_providers_dependencies.py
+++ b/scripts/ci/pre_commit/update_providers_dependencies.py
@@ -34,9 +34,10 @@
 AIRFLOW_SOURCES_ROOT = Path(__file__).parents[3].resolve()

-AIRFLOW_PROVIDERS_DIR = AIRFLOW_SOURCES_ROOT / "airflow" / "providers"
-AIRFLOW_TESTS_PROVIDERS_DIR = AIRFLOW_SOURCES_ROOT / "tests" / "providers"
-AIRFLOW_SYSTEM_TESTS_PROVIDERS_DIR = AIRFLOW_SOURCES_ROOT / "system" / "tests" / "providers"
+AIRFLOW_PROVIDERS_DIR = AIRFLOW_SOURCES_ROOT / "providers"
+AIRFLOW_PROVIDERS_SRC_DIR = AIRFLOW_PROVIDERS_DIR / "src" / "airflow" / "providers"
+AIRFLOW_TESTS_PROVIDERS_DIR = AIRFLOW_PROVIDERS_DIR / "tests"
+AIRFLOW_SYSTEM_TESTS_PROVIDERS_DIR = AIRFLOW_TESTS_PROVIDERS_DIR / "system"

 DEPENDENCIES_JSON_FILE_PATH = AIRFLOW_SOURCES_ROOT / "generated" / "provider_dependencies.json"

@@ -95,16 +96,18 @@ def visit_ImportFrom(self, node: ImportFrom):

 def find_all_providers_and_provider_files():
-    for root, _, filenames in os.walk(AIRFLOW_PROVIDERS_DIR):
+    for root, _, filenames in os.walk(AIRFLOW_PROVIDERS_SRC_DIR):
         for filename in filenames:
             if filename == "provider.yaml":
                 provider_file = Path(root, filename)
-                provider_name =
str(provider_file.parent.relative_to(AIRFLOW_PROVIDERS_DIR)).replace( + provider_name = str(provider_file.parent.relative_to(AIRFLOW_PROVIDERS_SRC_DIR)).replace( os.sep, "." ) provider_info = yaml.safe_load(provider_file.read_text()) if provider_info["state"] == "suspended": - suspended_paths.append(provider_file.parent.relative_to(AIRFLOW_PROVIDERS_DIR).as_posix()) + suspended_paths.append( + provider_file.parent.relative_to(AIRFLOW_PROVIDERS_SRC_DIR).as_posix() + ) ALL_PROVIDERS[provider_name] = provider_info path = Path(root, filename) if path.is_file() and path.name.endswith(".py"): @@ -145,7 +148,7 @@ def get_imports_from_file(file_path: Path) -> list[str]: def get_provider_id_from_file_name(file_path: Path) -> str | None: # is_relative_to is only available in Python 3.9 - we should simplify this check when we are Python 3.9+ try: - relative_path = file_path.relative_to(AIRFLOW_PROVIDERS_DIR) + relative_path = file_path.relative_to(AIRFLOW_PROVIDERS_SRC_DIR) except ValueError: try: relative_path = file_path.relative_to(AIRFLOW_SYSTEM_TESTS_PROVIDERS_DIR) @@ -173,7 +176,14 @@ def check_if_different_provider_used(file_path: Path) -> None: imported_provider = get_provider_id_from_import(import_name, file_path) if imported_provider is not None and imported_provider not in ALL_PROVIDERS: warnings.append(f"The provider {imported_provider} from {file_path} cannot be found.") - elif imported_provider and file_provider != imported_provider: + continue + + if imported_provider == "standard": + # Standard -- i.e. BashOperator is used in a lot of example dags, but we don't want to mark this + # as a provider cross dependency + if file_path.name == "celery_executor_utils.py" or "/example_dags/" in file_path.as_posix(): + continue + if imported_provider and file_provider != imported_provider: ALL_DEPENDENCIES[file_provider]["cross-providers-deps"].append(imported_provider) diff --git a/scripts/ci/pre_commit/version_heads_map.py b/scripts/ci/pre_commit/version_heads_map.py index 10a6dee2eaf23..6796819444d8c 100755 --- a/scripts/ci/pre_commit/version_heads_map.py +++ b/scripts/ci/pre_commit/version_heads_map.py @@ -29,8 +29,9 @@ DB_FILE = PROJECT_SOURCE_ROOT_DIR / "airflow" / "utils" / "db.py" MIGRATION_PATH = PROJECT_SOURCE_ROOT_DIR / "airflow" / "migrations" / "versions" -FAB_DB_FILE = PROJECT_SOURCE_ROOT_DIR / "airflow" / "providers" / "fab" / "auth_manager" / "models" / "db.py" -FAB_MIGRATION_PATH = PROJECT_SOURCE_ROOT_DIR / "airflow" / "providers" / "fab" / "migrations" / "versions" +PROVIDERS_SRC = PROJECT_SOURCE_ROOT_DIR / "providers" / "src" +FAB_DB_FILE = PROVIDERS_SRC / "airflow" / "providers" / "fab" / "auth_manager" / "models" / "db.py" +FAB_MIGRATION_PATH = PROVIDERS_SRC / "airflow" / "providers" / "fab" / "migrations" / "versions" sys.path.insert(0, str(Path(__file__).parent.resolve())) # make sure common_precommit_utils is importable diff --git a/scripts/docker/install_airflow.sh b/scripts/docker/install_airflow.sh index 5db10ad967695..324bca7ccfe09 100644 --- a/scripts/docker/install_airflow.sh +++ b/scripts/docker/install_airflow.sh @@ -48,8 +48,13 @@ function install_airflow() { # Determine the installation_command_flags based on AIRFLOW_INSTALLATION_METHOD method local installation_command_flags if [[ ${AIRFLOW_INSTALLATION_METHOD} == "." 
]]; then
+        # We need _a_ file in there otherwise the editable install doesn't include anything in the .pth file
+        mkdir -p ./providers/src/airflow/providers/
+        touch ./providers/src/airflow/providers/__init__.py
+        trap 'rm -f ./providers/src/airflow/providers/__init__.py 2>/dev/null' EXIT
+
         # When installing from sources - we always use `--editable` mode
-        installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}"
+        installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./providers"
     elif [[ ${AIRFLOW_INSTALLATION_METHOD} == "apache-airflow" ]]; then
         installation_command_flags="apache-airflow[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}"
     elif [[ ${AIRFLOW_INSTALLATION_METHOD} == apache-airflow\ @\ * ]]; then
diff --git a/scripts/in_container/install_devel_deps.py b/scripts/in_container/install_devel_deps.py
index 7a2410f385dfc..84f28bb2cc48b 100755
--- a/scripts/in_container/install_devel_deps.py
+++ b/scripts/in_container/install_devel_deps.py
@@ -27,7 +27,8 @@
 def get_devel_test_deps() -> list[str]:
-    devel_deps: list[str] = []
+    # Pre-install the tests_common pytest plugin/utils, in case sources aren't mounted
+    devel_deps: list[str] = ["./dev/tests_common"]
     hatch_build_content = (AIRFLOW_SOURCES_DIR / "hatch_build.py").read_text().splitlines()
     store = False
     for line in hatch_build_content:
diff --git a/scripts/in_container/run_mypy.sh b/scripts/in_container/run_mypy.sh
index 0245825a72647..976963e444a05 100755
--- a/scripts/in_container/run_mypy.sh
+++ b/scripts/in_container/run_mypy.sh
@@ -30,8 +30,8 @@ then
    for folder in ${SUSPENDED_PROVIDERS_FOLDERS=}
    do
        ADDITIONAL_MYPY_OPTIONS+=(
-            "--exclude" "airflow/providers/${folder}/*"
-            "--exclude" "tests/providers/${folder}/*"
+            "--exclude" "providers/src/airflow/providers/${folder}/*"
+            "--exclude" "providers/tests/${folder}/*"
        )
    done
fi
diff --git a/scripts/in_container/run_provider_yaml_files_check.py b/scripts/in_container/run_provider_yaml_files_check.py
index 95978a04ddd09..ab5ebcac697b9 100755
--- a/scripts/in_container/run_provider_yaml_files_check.py
+++ b/scripts/in_container/run_provider_yaml_files_check.py
@@ -74,6 +74,7 @@
 )
 ROOT_DIR = pathlib.Path(__file__).resolve().parents[2]
+PROVIDERS_SRC = ROOT_DIR / "providers" / "src"
 DOCS_DIR = ROOT_DIR.joinpath("docs")
 PROVIDER_DATA_SCHEMA_PATH = ROOT_DIR.joinpath("airflow", "provider.yaml.schema.json")
 PROVIDER_ISSUE_TEMPLATE_PATH = ROOT_DIR.joinpath(
@@ -97,11 +98,13 @@
 suspended_integrations: set[str] = set()

-def _filepath_to_module(filepath: pathlib.Path) -> str:
-    p = filepath.resolve().relative_to(ROOT_DIR).as_posix()
-    if p.endswith(".py"):
-        p = p[:-3]
-    return p.replace("/", ".")
+def _filepath_to_module(filepath: pathlib.Path | str) -> str:
+    if isinstance(filepath, str):
+        filepath = pathlib.Path(filepath)
+    if filepath.name == "provider.yaml":
+        filepath = filepath.parent
+    p = filepath.resolve().relative_to(PROVIDERS_SRC).with_suffix("")
+    return p.as_posix().replace("/", ".")

 def _load_schema() -> dict[str, Any]:
@@ -270,8 +273,8 @@ def check_if_objects_exist_and_belong_to_package(
 def parse_module_data(provider_data, resource_type, yaml_file_path):
-    package_dir = ROOT_DIR.joinpath(yaml_file_path).parent
-    provider_package = pathlib.Path(yaml_file_path).parent.as_posix().replace("/", ".")
+    provider_dir = pathlib.Path(yaml_file_path).parent
+    package_dir = ROOT_DIR.joinpath(provider_dir)
     py_files = itertools.chain(
         package_dir.glob(f"**/{resource_type}/*.py"),
package_dir.glob(f"{resource_type}/*.py"), @@ -280,7 +283,7 @@ def parse_module_data(provider_data, resource_type, yaml_file_path): ) expected_modules = {_filepath_to_module(f) for f in py_files if f.name != "__init__.py"} resource_data = provider_data.get(resource_type, []) - return expected_modules, provider_package, resource_data + return expected_modules, _filepath_to_module(provider_dir), resource_data def run_check(title: str): @@ -398,12 +401,9 @@ def check_completeness_of_list_of_transfers(yaml_files: dict[str, dict]) -> tupl current_modules, provider_package, yaml_file_path, resource_type, ObjectType.MODULE ) try: - package_name = os.fspath(ROOT_DIR.joinpath(yaml_file_path).parent.relative_to(ROOT_DIR)).replace( - "/", "." - ) assert_sets_equal( set(expected_modules), - f"Found list of transfer modules in provider package: {package_name}", + f"Found list of transfer modules in provider package: {provider_package}", set(current_modules), f"Currently configured list of transfer modules in {yaml_file_path}", ) @@ -423,7 +423,7 @@ def check_hook_class_name_entries_in_connection_types(yaml_files: dict[str, dict num_errors = 0 num_connection_types = 0 for yaml_file_path, provider_data in yaml_files.items(): - provider_package = pathlib.Path(yaml_file_path).parent.as_posix().replace("/", ".") + provider_package = _filepath_to_module(yaml_file_path) connection_types = provider_data.get(resource_type) if connection_types: num_connection_types += len(connection_types) @@ -440,7 +440,7 @@ def check_plugin_classes(yaml_files: dict[str, dict]) -> tuple[int, int]: num_errors = 0 num_plugins = 0 for yaml_file_path, provider_data in yaml_files.items(): - provider_package = pathlib.Path(yaml_file_path).parent.as_posix().replace("/", ".") + provider_package = _filepath_to_module(yaml_file_path) plugins = provider_data.get(resource_type) if plugins: num_plugins += len(plugins) @@ -460,7 +460,7 @@ def check_extra_link_classes(yaml_files: dict[str, dict]) -> tuple[int, int]: num_errors = 0 num_extra_links = 0 for yaml_file_path, provider_data in yaml_files.items(): - provider_package = pathlib.Path(yaml_file_path).parent.as_posix().replace("/", ".") + provider_package = _filepath_to_module(yaml_file_path) extra_links = provider_data.get(resource_type) if extra_links: num_extra_links += len(extra_links) @@ -476,7 +476,7 @@ def check_notification_classes(yaml_files: dict[str, dict]) -> tuple[int, int]: num_errors = 0 num_notifications = 0 for yaml_file_path, provider_data in yaml_files.items(): - provider_package = pathlib.Path(yaml_file_path).parent.as_posix().replace("/", ".") + provider_package = _filepath_to_module(yaml_file_path) notifications = provider_data.get(resource_type) if notifications: num_notifications += len(notifications) @@ -692,7 +692,9 @@ def check_providers_have_all_documentation_files(yaml_files: dict[str, dict]): ProvidersManager().initialize_providers_configuration() architecture = Architecture.get_current() console.print(f"Verifying packages on {architecture} architecture. 
Platform: {platform.machine()}.") - provider_files_pattern = pathlib.Path(ROOT_DIR, "airflow", "providers").rglob("provider.yaml") + provider_files_pattern = pathlib.Path(ROOT_DIR, "providers", "src", "airflow", "providers").rglob( + "provider.yaml" + ) all_provider_files = sorted(str(path) for path in provider_files_pattern) if len(sys.argv) > 1: paths = [os.fspath(ROOT_DIR / f) for f in sorted(sys.argv[1:])] diff --git a/tests/always/test_connection.py b/tests/always/test_connection.py index 5e41ddda798c0..0f6b27c62d4ef 100644 --- a/tests/always/test_connection.py +++ b/tests/always/test_connection.py @@ -33,7 +33,8 @@ from airflow.models import Connection, crypto from airflow.providers.sqlite.hooks.sqlite import SqliteHook from airflow.providers_manager import HookInfo -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars ConnectionParts = namedtuple("ConnectionParts", ["conn_type", "login", "password", "host", "port", "schema"]) diff --git a/tests/always/test_example_dags.py b/tests/always/test_example_dags.py index 9d10ce5cad199..9def5f42206f8 100644 --- a/tests/always/test_example_dags.py +++ b/tests/always/test_example_dags.py @@ -29,12 +29,13 @@ from airflow.models import DagBag from airflow.utils import yaml -from tests.test_utils.asserts import assert_queries_count + +from dev.tests_common.test_utils.asserts import assert_queries_count AIRFLOW_SOURCES_ROOT = Path(__file__).resolve().parents[2] AIRFLOW_PROVIDERS_ROOT = AIRFLOW_SOURCES_ROOT / "airflow" / "providers" CURRENT_PYTHON_VERSION = f"{sys.version_info.major}.{sys.version_info.minor}" -PROVIDERS_PREFIXES = ("airflow/providers/", "tests/system/providers/") +PROVIDERS_PREFIXES = ("providers/src/airflow/providers/", "providers/tests/system/") OPTIONAL_PROVIDERS_DEPENDENCIES: dict[str, dict[str, str | None]] = { # Some examples or system tests may depend on additional packages # that are not included in certain CI checks. @@ -50,17 +51,17 @@ # Generally, these should be resolved as soon as a parameter or operator is deprecated. # If the deprecation is postponed, the item should be added to this tuple, # and a corresponding Issue should be created on GitHub. 
-    "tests/system/providers/google/cloud/bigquery/example_bigquery_operations.py",
-    "tests/system/providers/google/cloud/dataflow/example_dataflow_sql.py",
-    "tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py",
-    "tests/system/providers/google/cloud/datapipelines/example_datapipeline.py",
-    "tests/system/providers/google/cloud/gcs/example_gcs_sensor.py",
-    "tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine.py",
-    "tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py",
-    "tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py",
-    "tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py",
-    "tests/system/providers/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py",
-    "tests/system/providers/google/cloud/life_sciences/example_life_sciences.py",
+    "providers/tests/system/google/cloud/bigquery/example_bigquery_operations.py",
+    "providers/tests/system/google/cloud/dataflow/example_dataflow_sql.py",
+    "providers/tests/system/google/cloud/dataproc/example_dataproc_gke.py",
+    "providers/tests/system/google/cloud/datapipelines/example_datapipeline.py",
+    "providers/tests/system/google/cloud/gcs/example_gcs_sensor.py",
+    "providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py",
+    "providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py",
+    "providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py",
+    "providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py",
+    "providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py",
+    "providers/tests/system/google/cloud/life_sciences/example_life_sciences.py",
     # Deprecated Operators/Hooks, which are replaced by common.sql Operators/Hooks
 )
@@ -89,7 +90,7 @@ def get_suspended_providers_folders() -> list[str]:
             suspended_providers.append(
                 provider_path.parent.relative_to(AIRFLOW_SOURCES_ROOT)
                 .as_posix()
-                .replace("airflow/providers/", "")
+                .replace("providers/src/airflow/providers/", "")
             )
     return suspended_providers
@@ -107,7 +108,7 @@ def get_python_excluded_providers_folders() -> list[str]:
             excluded_providers.append(
                 provider_path.parent.relative_to(AIRFLOW_SOURCES_ROOT)
                 .as_posix()
-                .replace("airflow/providers/", "")
+                .replace("providers/src/airflow/providers/", "")
             )
     return excluded_providers
diff --git a/tests/always/test_project_structure.py b/tests/always/test_project_structure.py
index b27729a68a261..9db0b22df84fc 100644
--- a/tests/always/test_project_structure.py
+++ b/tests/always/test_project_structure.py
@@ -21,169 +21,159 @@
 import itertools
 import mmap
 import os
+import pathlib

 import pytest

-ROOT_FOLDER = os.path.realpath(
-    os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir)
-)
+ROOT_FOLDER = pathlib.Path(__file__).parents[2]
+PROVIDERS_SRC = ROOT_FOLDER.joinpath("providers", "src")
+PROVIDERS_TESTS = ROOT_FOLDER.joinpath("providers", "tests")

 class TestProjectStructure:
     def test_reference_to_providers_from_core(self):
-        for filename in glob.glob(f"{ROOT_FOLDER}/example_dags/**/*.py", recursive=True):
+        for filename in ROOT_FOLDER.glob("example_dags/**/*.py"):
             self.assert_file_not_contains(filename, "providers")

     def test_deprecated_packages(self):
-        path_pattern = f"{ROOT_FOLDER}/airflow/contrib/**/*.py"
-
-        for filename in glob.glob(path_pattern, recursive=True):
-            if filename.endswith("/__init__.py"):
+        for filename in ROOT_FOLDER.glob("airflow/contrib/**/*.py"):
+            if filename.name == "__init__.py":
                 self.assert_file_contains(filename, "This package is deprecated.")
             else:
                 self.assert_file_contains(filename, "This module is deprecated.")

-    def assert_file_not_contains(self, filename: str, pattern: str):
+    def assert_file_not_contains(self, filename: pathlib.Path, pattern: str):
         with open(filename, "rb", 0) as file, mmap.mmap(file.fileno(), 0, access=mmap.ACCESS_READ) as content:
             if content.find(bytes(pattern, "utf-8")) != -1:
                 pytest.fail(f"File {filename} not contains pattern - {pattern}")

-    def assert_file_contains(self, filename: str, pattern: str):
+    def assert_file_contains(self, filename: pathlib.Path, pattern: str):
         with open(filename, "rb", 0) as file, mmap.mmap(file.fileno(), 0, access=mmap.ACCESS_READ) as content:
             if content.find(bytes(pattern, "utf-8")) == -1:
                 pytest.fail(f"File {filename} contains illegal pattern - {pattern}")

     def test_providers_modules_should_have_tests(self):
         """
-        Assert every module in /airflow/providers has a corresponding test_ file in tests/airflow/providers.
+        Assert every module in /providers/src/airflow/providers has a corresponding test_ file in providers/tests.
         """
         # The test below had a bug for quite a while and we missed a lot of modules to have tests
         # We should make sure that number goes to 0
         OVERLOOKED_TESTS = [
-            "tests/providers/amazon/aws/executors/batch/test_boto_schema.py",
-            "tests/providers/amazon/aws/executors/batch/test_batch_executor_config.py",
-            "tests/providers/amazon/aws/executors/batch/test_utils.py",
-            "tests/providers/amazon/aws/executors/ecs/test_boto_schema.py",
-            "tests/providers/amazon/aws/executors/ecs/test_ecs_executor_config.py",
-            "tests/providers/amazon/aws/executors/ecs/test_utils.py",
-            "tests/providers/amazon/aws/executors/utils/test_base_config_keys.py",
-            "tests/providers/amazon/aws/operators/test_emr.py",
-            "tests/providers/amazon/aws/operators/test_sagemaker.py",
-            "tests/providers/amazon/aws/sensors/test_emr.py",
-            "tests/providers/amazon/aws/sensors/test_sagemaker.py",
-            "tests/providers/amazon/aws/test_exceptions.py",
-            "tests/providers/amazon/aws/triggers/test_step_function.py",
-            "tests/providers/amazon/aws/utils/test_rds.py",
-            "tests/providers/amazon/aws/utils/test_sagemaker.py",
-            "tests/providers/amazon/aws/utils/test_asset_compat_lineage_collector.py",
-            "tests/providers/amazon/aws/waiters/test_base_waiter.py",
-            "tests/providers/apache/cassandra/hooks/test_cassandra.py",
-            "tests/providers/apache/drill/operators/test_drill.py",
-            "tests/providers/apache/druid/operators/test_druid_check.py",
-            "tests/providers/apache/hdfs/hooks/test_hdfs.py",
-            "tests/providers/apache/hdfs/log/test_hdfs_task_handler.py",
-            "tests/providers/apache/hdfs/sensors/test_hdfs.py",
-            "tests/providers/apache/hive/plugins/test_hive.py",
-            "tests/providers/celery/executors/test_celery_executor_utils.py",
-            "tests/providers/celery/executors/test_default_celery.py",
-            "tests/providers/cloudant/test_cloudant_fake.py",
-            "tests/providers/cncf/kubernetes/backcompat/test_backwards_compat_converters.py",
-            "tests/providers/cncf/kubernetes/executors/test_kubernetes_executor_types.py",
-            "tests/providers/cncf/kubernetes/executors/test_kubernetes_executor_utils.py",
-            "tests/providers/cncf/kubernetes/operators/test_kubernetes_pod.py",
-            "tests/providers/cncf/kubernetes/test_k8s_model.py",
-            "tests/providers/cncf/kubernetes/test_kube_client.py",
-            "tests/providers/cncf/kubernetes/test_kube_config.py",
-
"tests/providers/cncf/kubernetes/test_pod_generator_deprecated.py", - "tests/providers/cncf/kubernetes/test_pod_launcher_deprecated.py", - "tests/providers/cncf/kubernetes/test_python_kubernetes_script.py", - "tests/providers/cncf/kubernetes/test_secret.py", - "tests/providers/cncf/kubernetes/triggers/test_kubernetes_pod.py", - "tests/providers/cncf/kubernetes/utils/test_delete_from.py", - "tests/providers/cncf/kubernetes/utils/test_k8s_hashlib_wrapper.py", - "tests/providers/cncf/kubernetes/utils/test_xcom_sidecar.py", - "tests/providers/google/cloud/fs/test_gcs.py", - "tests/providers/google/cloud/links/test_automl.py", - "tests/providers/google/cloud/links/test_base.py", - "tests/providers/google/cloud/links/test_bigquery.py", - "tests/providers/google/cloud/links/test_bigquery_dts.py", - "tests/providers/google/cloud/links/test_bigtable.py", - "tests/providers/google/cloud/links/test_cloud_build.py", - "tests/providers/google/cloud/links/test_cloud_functions.py", - "tests/providers/google/cloud/links/test_cloud_memorystore.py", - "tests/providers/google/cloud/links/test_cloud_sql.py", - "tests/providers/google/cloud/links/test_cloud_storage_transfer.py", - "tests/providers/google/cloud/links/test_cloud_tasks.py", - "tests/providers/google/cloud/links/test_compute.py", - "tests/providers/google/cloud/links/test_data_loss_prevention.py", - "tests/providers/google/cloud/links/test_datacatalog.py", - "tests/providers/google/cloud/links/test_dataflow.py", - "tests/providers/google/cloud/links/test_dataform.py", - "tests/providers/google/cloud/links/test_datafusion.py", - "tests/providers/google/cloud/links/test_dataplex.py", - "tests/providers/google/cloud/links/test_dataprep.py", - "tests/providers/google/cloud/links/test_dataproc.py", - "tests/providers/google/cloud/links/test_datastore.py", - "tests/providers/google/cloud/links/test_kubernetes_engine.py", - "tests/providers/google/cloud/links/test_life_sciences.py", - "tests/providers/google/cloud/links/test_mlengine.py", - "tests/providers/google/cloud/links/test_pubsub.py", - "tests/providers/google/cloud/links/test_spanner.py", - "tests/providers/google/cloud/links/test_stackdriver.py", - "tests/providers/google/cloud/links/test_vertex_ai.py", - "tests/providers/google/cloud/links/test_workflows.py", - "tests/providers/google/cloud/operators/vertex_ai/test_auto_ml.py", - "tests/providers/google/cloud/operators/vertex_ai/test_batch_prediction_job.py", - "tests/providers/google/cloud/operators/vertex_ai/test_custom_job.py", - "tests/providers/google/cloud/operators/vertex_ai/test_dataset.py", - "tests/providers/google/cloud/operators/vertex_ai/test_endpoint_service.py", - "tests/providers/google/cloud/operators/vertex_ai/test_hyperparameter_tuning_job.py", - "tests/providers/google/cloud/operators/vertex_ai/test_model_service.py", - "tests/providers/google/cloud/operators/vertex_ai/test_pipeline_job.py", - "tests/providers/google/cloud/sensors/test_dataform.py", - "tests/providers/google/cloud/transfers/test_bigquery_to_sql.py", - "tests/providers/google/cloud/transfers/test_presto_to_gcs.py", - "tests/providers/google/cloud/utils/test_bigquery.py", - "tests/providers/google/cloud/utils/test_bigquery_get_data.py", - "tests/providers/google/cloud/utils/test_dataform.py", - "tests/providers/google/common/links/test_storage.py", - "tests/providers/google/common/test_consts.py", - "tests/providers/google/test_go_module_utils.py", - "tests/providers/microsoft/azure/operators/test_adls.py", - 
"tests/providers/microsoft/azure/transfers/test_azure_blob_to_gcs.py", - "tests/providers/openlineage/utils/test_asset_compat_lineage_collector.py", - "tests/providers/slack/notifications/test_slack_notifier.py", - "tests/providers/snowflake/triggers/test_snowflake_trigger.py", - "tests/providers/yandex/hooks/test_yandexcloud_dataproc.py", - "tests/providers/yandex/operators/test_yandexcloud_dataproc.py", - "tests/providers/fab/migrations/test_env.py", + "providers/tests/amazon/aws/executors/batch/test_boto_schema.py", + "providers/tests/amazon/aws/executors/batch/test_batch_executor_config.py", + "providers/tests/amazon/aws/executors/batch/test_utils.py", + "providers/tests/amazon/aws/executors/ecs/test_boto_schema.py", + "providers/tests/amazon/aws/executors/ecs/test_ecs_executor_config.py", + "providers/tests/amazon/aws/executors/ecs/test_utils.py", + "providers/tests/amazon/aws/executors/utils/test_base_config_keys.py", + "providers/tests/amazon/aws/operators/test_emr.py", + "providers/tests/amazon/aws/operators/test_sagemaker.py", + "providers/tests/amazon/aws/sensors/test_emr.py", + "providers/tests/amazon/aws/sensors/test_sagemaker.py", + "providers/tests/amazon/aws/test_exceptions.py", + "providers/tests/amazon/aws/triggers/test_step_function.py", + "providers/tests/amazon/aws/utils/test_rds.py", + "providers/tests/amazon/aws/utils/test_sagemaker.py", + "providers/tests/amazon/aws/waiters/test_base_waiter.py", + "providers/tests/apache/cassandra/hooks/test_cassandra.py", + "providers/tests/apache/drill/operators/test_drill.py", + "providers/tests/apache/druid/operators/test_druid_check.py", + "providers/tests/apache/hdfs/hooks/test_hdfs.py", + "providers/tests/apache/hdfs/log/test_hdfs_task_handler.py", + "providers/tests/apache/hdfs/sensors/test_hdfs.py", + "providers/tests/apache/hive/plugins/test_hive.py", + "providers/tests/celery/executors/test_celery_executor_utils.py", + "providers/tests/celery/executors/test_default_celery.py", + "providers/tests/cloudant/test_cloudant_fake.py", + "providers/tests/cncf/kubernetes/backcompat/test_backwards_compat_converters.py", + "providers/tests/cncf/kubernetes/executors/test_kubernetes_executor_types.py", + "providers/tests/cncf/kubernetes/executors/test_kubernetes_executor_utils.py", + "providers/tests/cncf/kubernetes/operators/test_kubernetes_pod.py", + "providers/tests/cncf/kubernetes/test_k8s_model.py", + "providers/tests/cncf/kubernetes/test_kube_client.py", + "providers/tests/cncf/kubernetes/test_kube_config.py", + "providers/tests/cncf/kubernetes/test_pod_generator_deprecated.py", + "providers/tests/cncf/kubernetes/test_pod_launcher_deprecated.py", + "providers/tests/cncf/kubernetes/test_python_kubernetes_script.py", + "providers/tests/cncf/kubernetes/test_secret.py", + "providers/tests/cncf/kubernetes/triggers/test_kubernetes_pod.py", + "providers/tests/cncf/kubernetes/utils/test_delete_from.py", + "providers/tests/cncf/kubernetes/utils/test_k8s_hashlib_wrapper.py", + "providers/tests/cncf/kubernetes/utils/test_xcom_sidecar.py", + "providers/tests/google/cloud/fs/test_gcs.py", + "providers/tests/google/cloud/links/test_automl.py", + "providers/tests/google/cloud/links/test_base.py", + "providers/tests/google/cloud/links/test_bigquery.py", + "providers/tests/google/cloud/links/test_bigquery_dts.py", + "providers/tests/google/cloud/links/test_bigtable.py", + "providers/tests/google/cloud/links/test_cloud_build.py", + "providers/tests/google/cloud/links/test_cloud_functions.py", + 
"providers/tests/google/cloud/links/test_cloud_memorystore.py", + "providers/tests/google/cloud/links/test_cloud_sql.py", + "providers/tests/google/cloud/links/test_cloud_storage_transfer.py", + "providers/tests/google/cloud/links/test_cloud_tasks.py", + "providers/tests/google/cloud/links/test_compute.py", + "providers/tests/google/cloud/links/test_data_loss_prevention.py", + "providers/tests/google/cloud/links/test_datacatalog.py", + "providers/tests/google/cloud/links/test_dataflow.py", + "providers/tests/google/cloud/links/test_dataform.py", + "providers/tests/google/cloud/links/test_datafusion.py", + "providers/tests/google/cloud/links/test_dataplex.py", + "providers/tests/google/cloud/links/test_dataprep.py", + "providers/tests/google/cloud/links/test_dataproc.py", + "providers/tests/google/cloud/links/test_datastore.py", + "providers/tests/google/cloud/links/test_kubernetes_engine.py", + "providers/tests/google/cloud/links/test_life_sciences.py", + "providers/tests/google/cloud/links/test_mlengine.py", + "providers/tests/google/cloud/links/test_pubsub.py", + "providers/tests/google/cloud/links/test_spanner.py", + "providers/tests/google/cloud/links/test_stackdriver.py", + "providers/tests/google/cloud/links/test_vertex_ai.py", + "providers/tests/google/cloud/links/test_workflows.py", + "providers/tests/google/cloud/operators/vertex_ai/test_auto_ml.py", + "providers/tests/google/cloud/operators/vertex_ai/test_batch_prediction_job.py", + "providers/tests/google/cloud/operators/vertex_ai/test_custom_job.py", + "providers/tests/google/cloud/operators/vertex_ai/test_endpoint_service.py", + "providers/tests/google/cloud/operators/vertex_ai/test_hyperparameter_tuning_job.py", + "providers/tests/google/cloud/operators/vertex_ai/test_model_service.py", + "providers/tests/google/cloud/operators/vertex_ai/test_pipeline_job.py", + "providers/tests/google/cloud/sensors/test_dataform.py", + "providers/tests/google/cloud/transfers/test_bigquery_to_sql.py", + "providers/tests/google/cloud/transfers/test_presto_to_gcs.py", + "providers/tests/google/cloud/utils/test_bigquery.py", + "providers/tests/google/cloud/utils/test_bigquery_get_data.py", + "providers/tests/google/cloud/utils/test_dataform.py", + "providers/tests/google/common/links/test_storage.py", + "providers/tests/google/common/test_consts.py", + "providers/tests/google/test_go_module_utils.py", + "providers/tests/microsoft/azure/operators/test_adls.py", + "providers/tests/microsoft/azure/transfers/test_azure_blob_to_gcs.py", + "providers/tests/slack/notifications/test_slack_notifier.py", + "providers/tests/snowflake/triggers/test_snowflake_trigger.py", + "providers/tests/yandex/hooks/test_yandexcloud_dataproc.py", + "providers/tests/yandex/operators/test_yandexcloud_dataproc.py", + "providers/tests/fab/migrations/test_env.py", ] # TODO: Should we extend this test to cover other directories? 
- modules_files = list(glob.glob(f"{ROOT_FOLDER}/airflow/providers/**/*.py", recursive=True)) + modules_files = PROVIDERS_SRC.joinpath("airflow", "providers").glob("**/*.py") + # Exclude __init__.py + modules_files = filter(lambda f: f.name != "__init__.py", modules_files) # Make path relative - modules_files = list(os.path.relpath(f, ROOT_FOLDER) for f in modules_files) + modules_files = map(lambda f: f.relative_to(PROVIDERS_SRC / "airflow" / "providers"), modules_files) # Exclude example_dags - modules_files = list(f for f in modules_files if "/example_dags/" not in f) + modules_files = (f for f in modules_files if "example_dags" not in f.parts) # Exclude _vendor - modules_files = list(f for f in modules_files if "/_vendor/" not in f) - # Exclude __init__.py - modules_files = list(f for f in modules_files if not f.endswith("__init__.py")) + modules_files = (f for f in modules_files if "_vendor" not in f.parts) # Exclude versions file - modules_files = list(f for f in modules_files if "/versions/" not in f) - # Change airflow/ to tests/ - expected_test_files = list( - f'tests/{f.partition("/")[2]}' for f in modules_files if not f.endswith("__init__.py") - ) + modules_files = (f for f in modules_files if "versions" not in f.parts) + # Change src/airflow/providers/ to tests/ + test_folder = pathlib.Path("providers/tests") # Add test_ prefix to filename - expected_test_files = list( - f'{f.rpartition("/")[0]}/test_{f.rpartition("/")[2]}' - for f in expected_test_files - if not f.endswith("__init__.py") - ) + expected_test_files = (test_folder.joinpath(f.with_name("test_" + f.name)) for f in modules_files) - current_test_files = glob.glob(f"{ROOT_FOLDER}/tests/providers/**/*.py", recursive=True) + current_test_files = PROVIDERS_TESTS.glob("**/*.py") # Make path relative current_test_files = (os.path.relpath(f, ROOT_FOLDER) for f in current_test_files) # Exclude __init__.py @@ -219,9 +209,9 @@ def get_imports_from_file(filepath: str): return import_names -def filepath_to_module(filepath: str): - filepath = os.path.relpath(os.path.abspath(filepath), ROOT_FOLDER) - return filepath.replace("/", ".")[: -(len(".py"))] +def filepath_to_module(path: pathlib.Path): + path = path.relative_to(PROVIDERS_SRC) + return path.as_posix().replace("/", ".")[: -(len(".py"))] def print_sorted(container: set, indent: str = " ") -> None: @@ -235,24 +225,22 @@ class ProjectStructureTest: CLASS_SUFFIXES = ["Operator", "Sensor"] def class_paths(self): - """Override this method if your classes are located under different paths""" for resource_type in self.CLASS_DIRS: - python_files = glob.glob( - f"{ROOT_FOLDER}/airflow/providers/{self.PROVIDER}/**/{resource_type}/**.py", recursive=True + python_files = PROVIDERS_SRC.glob( + f"airflow/providers/{self.PROVIDER}/**/{resource_type}/**/*.py", ) # Make path relative - resource_files = (os.path.relpath(f, ROOT_FOLDER) for f in python_files) - resource_files = (f for f in resource_files if not f.endswith("__init__.py")) + resource_files = filter(lambda f: f.name != "__init__.py", python_files) yield from resource_files def list_of_classes(self): classes = {} for operator_file in self.class_paths(): - operators_paths = self.get_classes_from_file(f"{ROOT_FOLDER}/{operator_file}") + operators_paths = self.get_classes_from_file(operator_file) classes.update(operators_paths) return classes - def get_classes_from_file(self, filepath: str): + def get_classes_from_file(self, filepath: pathlib.Path): with open(filepath) as py_file: content = py_file.read() doc_node = 
ast.parse(content, filepath) @@ -282,11 +270,12 @@ def example_paths(self): """Override this method if your example dags are located elsewhere""" # old_design: yield from glob.glob( - f"{ROOT_FOLDER}/airflow/providers/{self.PROVIDER}/**/example_dags/example_*.py", recursive=True + f"{ROOT_FOLDER}/providers/src/airflow/providers/{self.PROVIDER}/**/example_dags/example_*.py", + recursive=True, ) # new_design: yield from glob.glob( - f"{ROOT_FOLDER}/tests/system/providers/{self.PROVIDER}/**/example_*.py", recursive=True + f"{ROOT_FOLDER}/providers/tests/system/{self.PROVIDER}/**/example_*.py", recursive=True ) def test_missing_examples(self): diff --git a/tests/always/test_secrets.py b/tests/always/test_secrets.py index 9e1381faf1fef..a8b2296de1ccd 100644 --- a/tests/always/test_secrets.py +++ b/tests/always/test_secrets.py @@ -24,8 +24,9 @@ from airflow.configuration import ensure_secrets_loaded, initialize_secrets_backends from airflow.models import Connection, Variable from airflow.secrets.cache import SecretCache -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_variables + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_variables class TestConnectionsFromSecrets: diff --git a/tests/always/test_secrets_backends.py b/tests/always/test_secrets_backends.py index e862ed3263a6a..c5093d2e562a7 100644 --- a/tests/always/test_secrets_backends.py +++ b/tests/always/test_secrets_backends.py @@ -28,7 +28,8 @@ from airflow.secrets.environment_variables import EnvironmentVariablesBackend from airflow.secrets.metastore import MetastoreBackend from airflow.utils.session import create_session -from tests.test_utils.db import clear_db_connections, clear_db_variables + +from dev.tests_common.test_utils.db import clear_db_connections, clear_db_variables pytestmark = pytest.mark.db_test diff --git a/tests/always/test_secrets_local_filesystem.py b/tests/always/test_secrets_local_filesystem.py index 44b93dedb8222..40472ec64d5f5 100644 --- a/tests/always/test_secrets_local_filesystem.py +++ b/tests/always/test_secrets_local_filesystem.py @@ -29,7 +29,8 @@ from airflow.models import Variable from airflow.secrets import local_filesystem from airflow.secrets.local_filesystem import LocalFilesystemBackend -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars @contextmanager diff --git a/tests/api_connexion/conftest.py b/tests/api_connexion/conftest.py index 6a23b2cf11d93..8ab76fbb76298 100644 --- a/tests/api_connexion/conftest.py +++ b/tests/api_connexion/conftest.py @@ -19,8 +19,9 @@ import pytest from airflow.www import app -from tests.test_utils.config import conf_vars -from tests.test_utils.decorators import dont_initialize_flask_app_submodules + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules @pytest.fixture(scope="session") @@ -38,7 +39,7 @@ def minimal_app_for_api(): def factory(): with conf_vars( { - ("api", "auth_backends"): "tests.test_utils.remote_user_api_auth_backend", + ("api", "auth_backends"): "dev.tests_common.test_utils.remote_user_api_auth_backend", ( "core", "auth_manager", diff --git a/tests/api_connexion/endpoints/test_asset_endpoint.py b/tests/api_connexion/endpoints/test_asset_endpoint.py index 40ad59067f594..39b0129c3317d 100644 --- a/tests/api_connexion/endpoints/test_asset_endpoint.py +++ 
b/tests/api_connexion/endpoints/test_asset_endpoint.py @@ -36,11 +36,12 @@ from airflow.utils import timezone from airflow.utils.session import provide_session from airflow.utils.types import DagRunType -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.asserts import assert_queries_count -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_assets, clear_db_runs -from tests.test_utils.www import _check_last_log + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.asserts import assert_queries_count +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_assets, clear_db_runs +from dev.tests_common.test_utils.www import _check_last_log pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_backfill_endpoint.py b/tests/api_connexion/endpoints/test_backfill_endpoint.py index dd086339b73ac..67ec6316e2ffd 100644 --- a/tests/api_connexion/endpoints/test_backfill_endpoint.py +++ b/tests/api_connexion/endpoints/test_backfill_endpoint.py @@ -30,8 +30,14 @@ from airflow.operators.empty import EmptyOperator from airflow.utils import timezone from airflow.utils.session import provide_session -from tests.test_utils.api_connexion_utils import create_user, delete_user -from tests.test_utils.db import clear_db_backfills, clear_db_dags, clear_db_runs, clear_db_serialized_dags + +from dev.tests_common.test_utils.api_connexion_utils import create_user, delete_user +from dev.tests_common.test_utils.db import ( + clear_db_backfills, + clear_db_dags, + clear_db_runs, + clear_db_serialized_dags, +) pytestmark = [pytest.mark.db_test, pytest.mark.need_serialized_dag] diff --git a/tests/api_connexion/endpoints/test_config_endpoint.py b/tests/api_connexion/endpoints/test_config_endpoint.py index bd88c491c952b..c46a6571c23b9 100644 --- a/tests/api_connexion/endpoints/test_config_endpoint.py +++ b/tests/api_connexion/endpoints/test_config_endpoint.py @@ -21,8 +21,8 @@ import pytest -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.config import conf_vars +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_connection_endpoint.py b/tests/api_connexion/endpoints/test_connection_endpoint.py index a140046656e31..fb2d53074cd87 100644 --- a/tests/api_connexion/endpoints/test_connection_endpoint.py +++ b/tests/api_connexion/endpoints/test_connection_endpoint.py @@ -25,10 +25,11 @@ from airflow.models import Connection from airflow.secrets.environment_variables import CONN_ENV_PREFIX from airflow.utils.session import provide_session -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_connections -from tests.test_utils.www import _check_last_log + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_connections +from dev.tests_common.test_utils.www import 
_check_last_log pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_dag_endpoint.py b/tests/api_connexion/endpoints/test_dag_endpoint.py index 907009d26df89..86e0a3b4b3ec6 100644 --- a/tests/api_connexion/endpoints/test_dag_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_endpoint.py @@ -30,10 +30,11 @@ from airflow.operators.empty import EmptyOperator from airflow.utils.session import provide_session from airflow.utils.state import TaskInstanceState -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags -from tests.test_utils.www import _check_last_log + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags +from dev.tests_common.test_utils.www import _check_last_log pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_dag_parsing.py b/tests/api_connexion/endpoints/test_dag_parsing.py index ae42a565dd052..0fca8ee63c973 100644 --- a/tests/api_connexion/endpoints/test_dag_parsing.py +++ b/tests/api_connexion/endpoints/test_dag_parsing.py @@ -24,8 +24,9 @@ from airflow.models import DagBag from airflow.models.dagbag import DagPriorityParsingRequest -from tests.test_utils.api_connexion_utils import create_user, delete_user -from tests.test_utils.db import clear_db_dag_parsing_requests + +from dev.tests_common.test_utils.api_connexion_utils import create_user, delete_user +from dev.tests_common.test_utils.db import clear_db_dag_parsing_requests pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_dag_run_endpoint.py b/tests/api_connexion/endpoints/test_dag_run_endpoint.py index 6262d4e83479e..b881f5ea98638 100644 --- a/tests/api_connexion/endpoints/test_dag_run_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_run_endpoint.py @@ -34,11 +34,12 @@ from airflow.utils.session import create_session, provide_session from airflow.utils.state import DagRunState, State from airflow.utils.types import DagRunType -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags -from tests.test_utils.www import _check_last_log + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags +from dev.tests_common.test_utils.www import _check_last_log if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/api_connexion/endpoints/test_dag_source_endpoint.py b/tests/api_connexion/endpoints/test_dag_source_endpoint.py index f4df56ba629ae..80f32dc715a3c 100644 --- a/tests/api_connexion/endpoints/test_dag_source_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_source_endpoint.py @@ -23,8 +23,9 @@ import 
pytest from airflow.models import DagBag -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.db import clear_db_dag_code, clear_db_dags, clear_db_serialized_dags + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.db import clear_db_dag_code, clear_db_dags, clear_db_serialized_dags pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_dag_stats_endpoint.py b/tests/api_connexion/endpoints/test_dag_stats_endpoint.py index 9ab5b49765931..a447e2a6a4b23 100644 --- a/tests/api_connexion/endpoints/test_dag_stats_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_stats_endpoint.py @@ -26,8 +26,9 @@ from airflow.utils.session import create_session from airflow.utils.state import DagRunState from airflow.utils.types import DagRunType -from tests.test_utils.api_connexion_utils import create_user, delete_user -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags + +from dev.tests_common.test_utils.api_connexion_utils import create_user, delete_user +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_dag_warning_endpoint.py b/tests/api_connexion/endpoints/test_dag_warning_endpoint.py index f156d8921c0e6..b9c00991413bb 100644 --- a/tests/api_connexion/endpoints/test_dag_warning_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_warning_endpoint.py @@ -23,8 +23,9 @@ from airflow.models.dag import DagModel from airflow.models.dagwarning import DagWarning from airflow.utils.session import create_session -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.db import clear_db_dag_warnings, clear_db_dags + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.db import clear_db_dag_warnings, clear_db_dags pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_event_log_endpoint.py b/tests/api_connexion/endpoints/test_event_log_endpoint.py index e5ca3d301765a..42a610890ede2 100644 --- a/tests/api_connexion/endpoints/test_event_log_endpoint.py +++ b/tests/api_connexion/endpoints/test_event_log_endpoint.py @@ -21,9 +21,10 @@ from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP from airflow.models import Log from airflow.utils import timezone -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_logs + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_logs pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_extra_link_endpoint.py b/tests/api_connexion/endpoints/test_extra_link_endpoint.py index 2c3eacdc91dc0..373571a42ee47 100644 --- a/tests/api_connexion/endpoints/test_extra_link_endpoint.py +++ b/tests/api_connexion/endpoints/test_extra_link_endpoint.py @@ -30,11 +30,12 @@ from airflow.utils import timezone from 
airflow.utils.state import DagRunState from airflow.utils.types import DagRunType -from tests.test_utils.api_connexion_utils import create_user, delete_user -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS, BaseOperatorLink -from tests.test_utils.db import clear_db_runs, clear_db_xcom -from tests.test_utils.mock_operators import CustomOperator -from tests.test_utils.mock_plugins import mock_plugin_manager + +from dev.tests_common.test_utils.api_connexion_utils import create_user, delete_user +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, BaseOperatorLink +from dev.tests_common.test_utils.db import clear_db_runs, clear_db_xcom +from dev.tests_common.test_utils.mock_operators import CustomOperator +from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/api_connexion/endpoints/test_import_error_endpoint.py b/tests/api_connexion/endpoints/test_import_error_endpoint.py index af2b83ebb1eed..f4f7d03b323f2 100644 --- a/tests/api_connexion/endpoints/test_import_error_endpoint.py +++ b/tests/api_connexion/endpoints/test_import_error_endpoint.py @@ -23,10 +23,11 @@ from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP from airflow.utils import timezone from airflow.utils.session import provide_session -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.compat import ParseImportError -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_import_errors + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.compat import ParseImportError +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_import_errors pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_log_endpoint.py b/tests/api_connexion/endpoints/test_log_endpoint.py index 2b112e3221843..21d4e8feb83ad 100644 --- a/tests/api_connexion/endpoints/test_log_endpoint.py +++ b/tests/api_connexion/endpoints/test_log_endpoint.py @@ -32,8 +32,9 @@ from airflow.operators.empty import EmptyOperator from airflow.utils import timezone from airflow.utils.types import DagRunType -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.db import clear_db_runs + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.db import clear_db_runs pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_mapped_task_instance_endpoint.py b/tests/api_connexion/endpoints/test_mapped_task_instance_endpoint.py index fc53b8952f4aa..e41bbcc6c2e88 100644 --- a/tests/api_connexion/endpoints/test_mapped_task_instance_endpoint.py +++ b/tests/api_connexion/endpoints/test_mapped_task_instance_endpoint.py @@ -32,9 +32,10 @@ from airflow.utils.session import provide_session from airflow.utils.state import State, TaskInstanceState from airflow.utils.timezone import datetime -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.db import clear_db_runs, clear_db_sla_miss, clear_rendered_ti_fields -from tests.test_utils.mock_operators import MockOperator + +from 
dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.db import clear_db_runs, clear_db_sla_miss, clear_rendered_ti_fields +from dev.tests_common.test_utils.mock_operators import MockOperator pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_plugin_endpoint.py b/tests/api_connexion/endpoints/test_plugin_endpoint.py index 6670c684cb45d..2831af06bf3d9 100644 --- a/tests/api_connexion/endpoints/test_plugin_endpoint.py +++ b/tests/api_connexion/endpoints/test_plugin_endpoint.py @@ -28,10 +28,11 @@ from airflow.ti_deps.deps.base_ti_dep import BaseTIDep from airflow.timetables.base import Timetable from airflow.utils.module_loading import qualname -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.compat import BaseOperatorLink -from tests.test_utils.config import conf_vars -from tests.test_utils.mock_plugins import mock_plugin_manager + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.compat import BaseOperatorLink +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_pool_endpoint.py b/tests/api_connexion/endpoints/test_pool_endpoint.py index 2cc095d077aa9..455c417bd2581 100644 --- a/tests/api_connexion/endpoints/test_pool_endpoint.py +++ b/tests/api_connexion/endpoints/test_pool_endpoint.py @@ -21,10 +21,11 @@ from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP from airflow.models.pool import Pool from airflow.utils.session import provide_session -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_pools -from tests.test_utils.www import _check_last_log + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_pools +from dev.tests_common.test_utils.www import _check_last_log pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_provider_endpoint.py b/tests/api_connexion/endpoints/test_provider_endpoint.py index b4cf8f10a92ae..92e14887ec038 100644 --- a/tests/api_connexion/endpoints/test_provider_endpoint.py +++ b/tests/api_connexion/endpoints/test_provider_endpoint.py @@ -21,7 +21,8 @@ import pytest from airflow.providers_manager import ProviderInfo -from tests.test_utils.api_connexion_utils import create_user, delete_user + +from dev.tests_common.test_utils.api_connexion_utils import create_user, delete_user pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_task_endpoint.py b/tests/api_connexion/endpoints/test_task_endpoint.py index b2e068bd507fe..60237946a82ca 100644 --- a/tests/api_connexion/endpoints/test_task_endpoint.py +++ b/tests/api_connexion/endpoints/test_task_endpoint.py @@ -27,8 +27,9 @@ from airflow.models.expandinput import EXPAND_INPUT_EMPTY from airflow.models.serialized_dag import SerializedDagModel from airflow.operators.empty import EmptyOperator 
-from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_task_instance_endpoint.py b/tests/api_connexion/endpoints/test_task_instance_endpoint.py index b5b3163e988d0..809c2fab9aa2d 100644 --- a/tests/api_connexion/endpoints/test_task_instance_endpoint.py +++ b/tests/api_connexion/endpoints/test_task_instance_endpoint.py @@ -35,9 +35,10 @@ from airflow.utils.state import State from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.db import clear_db_runs, clear_db_sla_miss, clear_rendered_ti_fields -from tests.test_utils.www import _check_last_log + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.db import clear_db_runs, clear_db_sla_miss, clear_rendered_ti_fields +from dev.tests_common.test_utils.www import _check_last_log pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_variable_endpoint.py b/tests/api_connexion/endpoints/test_variable_endpoint.py index aa5f7c99674f8..e4302636184d3 100644 --- a/tests/api_connexion/endpoints/test_variable_endpoint.py +++ b/tests/api_connexion/endpoints/test_variable_endpoint.py @@ -22,10 +22,11 @@ from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP from airflow.models import Variable -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_variables -from tests.test_utils.www import _check_last_log + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_variables +from dev.tests_common.test_utils.www import _check_last_log pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_xcom_endpoint.py b/tests/api_connexion/endpoints/test_xcom_endpoint.py index 809e537f9f88d..0293e672c47d2 100644 --- a/tests/api_connexion/endpoints/test_xcom_endpoint.py +++ b/tests/api_connexion/endpoints/test_xcom_endpoint.py @@ -30,9 +30,10 @@ from airflow.utils.session import create_session from airflow.utils.timezone import utcnow from airflow.utils.types import DagRunType -from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_xcom + +from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_xcom pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/schemas/test_connection_schema.py 
b/tests/api_connexion/schemas/test_connection_schema.py index cf614eb3b114e..d7a0de4e89f05 100644 --- a/tests/api_connexion/schemas/test_connection_schema.py +++ b/tests/api_connexion/schemas/test_connection_schema.py @@ -30,7 +30,8 @@ ) from airflow.models import Connection from airflow.utils.session import create_session, provide_session -from tests.test_utils.db import clear_db_connections + +from dev.tests_common.test_utils.db import clear_db_connections pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/schemas/test_dag_run_schema.py b/tests/api_connexion/schemas/test_dag_run_schema.py index 01ed33fd59886..b874ade4442ff 100644 --- a/tests/api_connexion/schemas/test_dag_run_schema.py +++ b/tests/api_connexion/schemas/test_dag_run_schema.py @@ -29,8 +29,9 @@ from airflow.utils import timezone from airflow.utils.session import provide_session from airflow.utils.types import DagRunType -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.db import clear_db_runs + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import clear_db_runs if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/api_connexion/schemas/test_dataset_schema.py b/tests/api_connexion/schemas/test_dataset_schema.py index 4a6fdf6e6d513..769de6f1b10eb 100644 --- a/tests/api_connexion/schemas/test_dataset_schema.py +++ b/tests/api_connexion/schemas/test_dataset_schema.py @@ -30,7 +30,8 @@ from airflow.assets import Asset from airflow.models.asset import AssetAliasModel, AssetEvent, AssetModel from airflow.operators.empty import EmptyOperator -from tests.test_utils.db import clear_db_assets, clear_db_dags + +from dev.tests_common.test_utils.db import clear_db_assets, clear_db_dags pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/schemas/test_error_schema.py b/tests/api_connexion/schemas/test_error_schema.py index fb5e9ea219d22..5c358f2e2c374 100644 --- a/tests/api_connexion/schemas/test_error_schema.py +++ b/tests/api_connexion/schemas/test_error_schema.py @@ -25,8 +25,9 @@ ) from airflow.utils import timezone from airflow.utils.session import provide_session -from tests.test_utils.compat import ParseImportError -from tests.test_utils.db import clear_db_import_errors + +from dev.tests_common.test_utils.compat import ParseImportError +from dev.tests_common.test_utils.db import clear_db_import_errors pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/schemas/test_plugin_schema.py b/tests/api_connexion/schemas/test_plugin_schema.py index 199db02c7268a..ee079ce664348 100644 --- a/tests/api_connexion/schemas/test_plugin_schema.py +++ b/tests/api_connexion/schemas/test_plugin_schema.py @@ -28,7 +28,8 @@ ) from airflow.hooks.base import BaseHook from airflow.plugins_manager import AirflowPlugin -from tests.test_utils.compat import BaseOperatorLink + +from dev.tests_common.test_utils.compat import BaseOperatorLink class PluginHook(BaseHook): ... 
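The hunks above and below all apply the same mechanical rewrite: imports of tests.test_utils.* become dev.tests_common.test_utils.*, separated from the airflow imports by a blank line. A minimal sketch of how such a bulk rewrite could be scripted (a hypothetical helper, not part of this patch; it only handles the import-prefix change shown in the hunks, not the blank-line grouping):

import pathlib
import re

# Old and new module prefixes, as shown in the hunks in this patch.
OLD_IMPORT = re.compile(r"^from tests\.test_utils\.", re.MULTILINE)
NEW_IMPORT = "from dev.tests_common.test_utils."

# Assumes it is run from the repository root, where tests/ exists.
for path in pathlib.Path("tests").rglob("*.py"):
    text = path.read_text()
    if OLD_IMPORT.search(text):
        # Rewrite every old-style import prefix in place.
        path.write_text(OLD_IMPORT.sub(NEW_IMPORT, text))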
diff --git a/tests/api_connexion/schemas/test_pool_schemas.py b/tests/api_connexion/schemas/test_pool_schemas.py index 110103073aaa5..23788426c5018 100644 --- a/tests/api_connexion/schemas/test_pool_schemas.py +++ b/tests/api_connexion/schemas/test_pool_schemas.py @@ -21,7 +21,8 @@ from airflow.api_connexion.schemas.pool_schema import PoolCollection, pool_collection_schema, pool_schema from airflow.models.pool import Pool from airflow.utils.session import provide_session -from tests.test_utils.db import clear_db_pools + +from dev.tests_common.test_utils.db import clear_db_pools pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/schemas/test_xcom_schema.py b/tests/api_connexion/schemas/test_xcom_schema.py index e1f97dad0520d..06c36c31b7767 100644 --- a/tests/api_connexion/schemas/test_xcom_schema.py +++ b/tests/api_connexion/schemas/test_xcom_schema.py @@ -30,7 +30,8 @@ from airflow.models import DagRun, XCom from airflow.utils.dates import parse_execution_date from airflow.utils.session import create_session -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/test_auth.py b/tests/api_connexion/test_auth.py index 54e5632ad84d1..e568d618ad662 100644 --- a/tests/api_connexion/test_auth.py +++ b/tests/api_connexion/test_auth.py @@ -22,9 +22,10 @@ from airflow.auth.managers.simple.simple_auth_manager import SimpleAuthManager from airflow.auth.managers.simple.user import SimpleAuthManagerUser -from tests.test_utils.api_connexion_utils import assert_401 -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_pools + +from dev.tests_common.test_utils.api_connexion_utils import assert_401 +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_pools pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/test_parameters.py b/tests/api_connexion/test_parameters.py index d5fdbb79b9cc1..57c9c05415f65 100644 --- a/tests/api_connexion/test_parameters.py +++ b/tests/api_connexion/test_parameters.py @@ -29,7 +29,8 @@ validate_istimezone, ) from airflow.utils import timezone -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars class TestValidateIsTimezone: diff --git a/tests/api_connexion/test_security.py b/tests/api_connexion/test_security.py index c6a112b1a1bb9..2051cf613510e 100644 --- a/tests/api_connexion/test_security.py +++ b/tests/api_connexion/test_security.py @@ -18,7 +18,7 @@ import pytest -from tests.test_utils.api_connexion_utils import create_user, delete_user +from dev.tests_common.test_utils.api_connexion_utils import create_user, delete_user pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_fastapi/views/public/test_connections.py b/tests/api_fastapi/views/public/test_connections.py index 0b76fdbe4f1b2..a5cb301a43136 100644 --- a/tests/api_fastapi/views/public/test_connections.py +++ b/tests/api_fastapi/views/public/test_connections.py @@ -20,7 +20,8 @@ from airflow.models import Connection from airflow.utils.session import provide_session -from tests.test_utils.db import clear_db_connections + +from dev.tests_common.test_utils.db import clear_db_connections pytestmark = pytest.mark.db_test diff --git 
a/tests/api_fastapi/views/public/test_dags.py b/tests/api_fastapi/views/public/test_dags.py index 58b3daf35c64a..7ac93a2f2e07d 100644 --- a/tests/api_fastapi/views/public/test_dags.py +++ b/tests/api_fastapi/views/public/test_dags.py @@ -27,7 +27,8 @@ from airflow.utils.session import provide_session from airflow.utils.state import DagRunState from airflow.utils.types import DagRunType -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags + +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags pytestmark = pytest.mark.db_test diff --git a/tests/api_fastapi/views/public/test_variables.py b/tests/api_fastapi/views/public/test_variables.py index 0ee2cebd77340..7957d0bf22249 100644 --- a/tests/api_fastapi/views/public/test_variables.py +++ b/tests/api_fastapi/views/public/test_variables.py @@ -20,7 +20,8 @@ from airflow.models.variable import Variable from airflow.utils.session import provide_session -from tests.test_utils.db import clear_db_variables + +from dev.tests_common.test_utils.db import clear_db_variables pytestmark = pytest.mark.db_test diff --git a/tests/api_fastapi/views/ui/test_assets.py b/tests/api_fastapi/views/ui/test_assets.py index 99cd9bc1e9960..d0e93c709b076 100644 --- a/tests/api_fastapi/views/ui/test_assets.py +++ b/tests/api_fastapi/views/ui/test_assets.py @@ -20,7 +20,8 @@ from airflow.assets import Asset from airflow.operators.empty import EmptyOperator -from tests.conftest import initial_db_init + +from dev.tests_common.test_utils.db import initial_db_init pytestmark = pytest.mark.db_test diff --git a/tests/api_fastapi/views/ui/test_dashboard.py b/tests/api_fastapi/views/ui/test_dashboard.py index 970b79ad3568c..bee683535704f 100644 --- a/tests/api_fastapi/views/ui/test_dashboard.py +++ b/tests/api_fastapi/views/ui/test_dashboard.py @@ -26,7 +26,8 @@ from airflow.operators.empty import EmptyOperator from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.types import DagRunType -from tests.test_utils.db import clear_db_runs + +from dev.tests_common.test_utils.db import clear_db_runs pytestmark = pytest.mark.db_test diff --git a/tests/api_internal/endpoints/test_rpc_api_endpoint.py b/tests/api_internal/endpoints/test_rpc_api_endpoint.py index f12e0ae087bbb..364b0f44eaf27 100644 --- a/tests/api_internal/endpoints/test_rpc_api_endpoint.py +++ b/tests/api_internal/endpoints/test_rpc_api_endpoint.py @@ -34,8 +34,9 @@ from airflow.utils.jwt_signer import JWTSigner from airflow.utils.state import State from airflow.www import app -from tests.test_utils.config import conf_vars -from tests.test_utils.decorators import dont_initialize_flask_app_submodules + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules # Note: Sounds a bit strange to disable internal API tests in isolation mode but... 
# As long as the test is modelled to run its own internal API endpoints, it is conflicting diff --git a/tests/api_internal/test_internal_api_call.py b/tests/api_internal/test_internal_api_call.py index d779b504ea479..02ae2d9f55125 100644 --- a/tests/api_internal/test_internal_api_call.py +++ b/tests/api_internal/test_internal_api_call.py @@ -34,7 +34,8 @@ from airflow.serialization.serialized_objects import BaseSerialization from airflow.settings import _ENABLE_AIP_44 from airflow.utils.state import State -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars if TYPE_CHECKING: from airflow.serialization.pydantic.taskinstance import TaskInstancePydantic diff --git a/tests/assets/test_manager.py b/tests/assets/test_manager.py index 950949fe474e3..cd4563d9a9c4e 100644 --- a/tests/assets/test_manager.py +++ b/tests/assets/test_manager.py @@ -48,7 +48,7 @@ @pytest.fixture def clear_assets(): - from tests.test_utils.db import clear_db_assets + from dev.tests_common.test_utils.db import clear_db_assets clear_db_assets() yield diff --git a/tests/assets/tests_asset.py b/tests/assets/tests_asset.py index afbb46827f3a9..10d91e7a00fb7 100644 --- a/tests/assets/tests_asset.py +++ b/tests/assets/tests_asset.py @@ -43,7 +43,7 @@ @pytest.fixture def clear_assets(): - from tests.test_utils.db import clear_db_assets + from dev.tests_common.test_utils.db import clear_db_assets clear_db_assets() yield diff --git a/tests/auth/managers/simple/views/test_auth.py b/tests/auth/managers/simple/views/test_auth.py index 197ed0e615cae..a4e2a12fdcf1c 100644 --- a/tests/auth/managers/simple/views/test_auth.py +++ b/tests/auth/managers/simple/views/test_auth.py @@ -23,7 +23,8 @@ from airflow.auth.managers.simple.simple_auth_manager import SimpleAuthManager from airflow.www import app as application -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars @pytest.fixture diff --git a/tests/cli/commands/test_celery_command.py b/tests/cli/commands/test_celery_command.py index 476e6279fbf12..c29d835d8ee07 100644 --- a/tests/cli/commands/test_celery_command.py +++ b/tests/cli/commands/test_celery_command.py @@ -30,7 +30,8 @@ from airflow.cli.commands import celery_command from airflow.configuration import conf from airflow.executors import executor_loader -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_config_command.py b/tests/cli/commands/test_config_command.py index 030303c28ec4d..90d38fa27ad03 100644 --- a/tests/cli/commands/test_config_command.py +++ b/tests/cli/commands/test_config_command.py @@ -22,7 +22,8 @@ from airflow.cli import cli_parser from airflow.cli.commands import config_command -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars STATSD_CONFIG_BEGIN_WITH = "# `StatsD `" diff --git a/tests/cli/commands/test_connection_command.py b/tests/cli/commands/test_connection_command.py index fbfd3d2945e4a..06578eb1492c1 100644 --- a/tests/cli/commands/test_connection_command.py +++ b/tests/cli/commands/test_connection_command.py @@ -33,7 +33,8 @@ from airflow.models import Connection from airflow.utils.db import merge_conn from airflow.utils.session import create_session -from tests.test_utils.db import clear_db_connections + +from dev.tests_common.test_utils.db import clear_db_connections 
pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_dag_command.py b/tests/cli/commands/test_dag_command.py index 338ae44a2b95a..00364794f1d53 100644 --- a/tests/cli/commands/test_dag_command.py +++ b/tests/cli/commands/test_dag_command.py @@ -47,8 +47,9 @@ from airflow.utils.state import DagRunState from airflow.utils.types import DagRunType from tests.models import TEST_DAGS_FOLDER -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_runs + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs DEFAULT_DATE = timezone.make_aware(datetime(2015, 1, 1), timezone=timezone.utc) if pendulum.__version__.startswith("3"): diff --git a/tests/cli/commands/test_dag_processor_command.py b/tests/cli/commands/test_dag_processor_command.py index 2b84f506e78f1..643710ce1e415 100644 --- a/tests/cli/commands/test_dag_processor_command.py +++ b/tests/cli/commands/test_dag_processor_command.py @@ -24,7 +24,8 @@ from airflow.cli import cli_parser from airflow.cli.commands import dag_processor_command from airflow.configuration import conf -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_info_command.py b/tests/cli/commands/test_info_command.py index 44287bb0ec162..c6bea2d8a5bb8 100644 --- a/tests/cli/commands/test_info_command.py +++ b/tests/cli/commands/test_info_command.py @@ -32,7 +32,8 @@ from airflow.config_templates import airflow_local_settings from airflow.logging_config import configure_logging from airflow.version import version as airflow_version -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars def capture_show_output(instance): diff --git a/tests/cli/commands/test_internal_api_command.py b/tests/cli/commands/test_internal_api_command.py index 194f7874839c8..11123c7c211a1 100644 --- a/tests/cli/commands/test_internal_api_command.py +++ b/tests/cli/commands/test_internal_api_command.py @@ -32,7 +32,8 @@ from airflow.cli.commands.internal_api_command import GunicornMonitor from airflow.settings import _ENABLE_AIP_44 from tests.cli.commands._common_cli_classes import _CommonCLIGunicornTestClass -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars console = Console(width=400, color_system="standard") diff --git a/tests/cli/commands/test_jobs_command.py b/tests/cli/commands/test_jobs_command.py index 28c739a2f2607..61540a68c51c6 100644 --- a/tests/cli/commands/test_jobs_command.py +++ b/tests/cli/commands/test_jobs_command.py @@ -27,7 +27,8 @@ from airflow.jobs.scheduler_job_runner import SchedulerJobRunner from airflow.utils.session import create_session from airflow.utils.state import JobState, State -from tests.test_utils.db import clear_db_jobs + +from dev.tests_common.test_utils.db import clear_db_jobs @pytest.mark.skip_if_database_isolation_mode diff --git a/tests/cli/commands/test_kerberos_command.py b/tests/cli/commands/test_kerberos_command.py index 35e90fb54923d..4a5e73ece788f 100644 --- a/tests/cli/commands/test_kerberos_command.py +++ b/tests/cli/commands/test_kerberos_command.py @@ -23,7 +23,8 @@ from airflow.cli import cli_parser from airflow.cli.commands import kerberos_command from airflow.security.kerberos import 
KerberosMode -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_kubernetes_command.py b/tests/cli/commands/test_kubernetes_command.py index d489aa3ea93f1..1e53bdc77d8d8 100644 --- a/tests/cli/commands/test_kubernetes_command.py +++ b/tests/cli/commands/test_kubernetes_command.py @@ -28,7 +28,8 @@ from airflow.cli import cli_parser from airflow.cli.commands import kubernetes_command from airflow.executors import executor_loader -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_plugins_command.py b/tests/cli/commands/test_plugins_command.py index 288b4453bb159..703b6879d7a3a 100644 --- a/tests/cli/commands/test_plugins_command.py +++ b/tests/cli/commands/test_plugins_command.py @@ -29,7 +29,8 @@ from airflow.listeners.listener import get_listener_manager from airflow.plugins_manager import AirflowPlugin from tests.plugins.test_plugin import AirflowTestPlugin as ComplexAirflowPlugin -from tests.test_utils.mock_plugins import mock_plugin_manager + +from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] @@ -88,15 +89,15 @@ def test_should_display_one_plugin(self): } ], "global_operator_extra_links": [ - "<tests.test_utils.mock_operators.AirflowLink object>", - "<tests.test_utils.mock_operators.GithubLink object>", + "<dev.tests_common.test_utils.mock_operators.AirflowLink object>", + "<dev.tests_common.test_utils.mock_operators.GithubLink object>", ], "timetables": ["tests.plugins.test_plugin.CustomCronDataIntervalTimetable"], "operator_extra_links": [ - "<tests.test_utils.mock_operators.GoogleLink object>", - "<tests.test_utils.mock_operators.AirflowLink2 object>", - "<tests.test_utils.mock_operators.CustomOpLink object>", - "<tests.test_utils.mock_operators.CustomBaseIndexOpLink object>", + "<dev.tests_common.test_utils.mock_operators.GoogleLink object>", + "<dev.tests_common.test_utils.mock_operators.AirflowLink2 object>", + "<dev.tests_common.test_utils.mock_operators.CustomOpLink object>", + "<dev.tests_common.test_utils.mock_operators.CustomBaseIndexOpLink object>", ], "hooks": ["tests.plugins.test_plugin.PluginHook"], "listeners": [ diff --git a/tests/cli/commands/test_rotate_fernet_key_command.py b/tests/cli/commands/test_rotate_fernet_key_command.py index d45753d75f70d..d638c2010e187 100644 --- a/tests/cli/commands/test_rotate_fernet_key_command.py +++ b/tests/cli/commands/test_rotate_fernet_key_command.py @@ -26,8 +26,9 @@ from airflow.hooks.base import BaseHook from airflow.models import Connection, Variable from airflow.utils.session import provide_session -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_connections, clear_db_variables + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_connections, clear_db_variables pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_scheduler_command.py b/tests/cli/commands/test_scheduler_command.py index 2e28196cb5c3c..8fce5d6e289b3 100644 --- a/tests/cli/commands/test_scheduler_command.py +++ b/tests/cli/commands/test_scheduler_command.py @@ -29,7 +29,8 @@ from airflow.executors import executor_loader from airflow.utils.scheduler_health import HealthServer, serve_health_check from airflow.utils.serve_logs import serve_logs -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_task_command.py b/tests/cli/commands/test_task_command.py index 3397005b80eb0..fb512792eb324 100644 --- a/tests/cli/commands/test_task_command.py +++ b/tests/cli/commands/test_task_command.py @@ -51,9 +51,10 @@ from airflow.utils.session import create_session from airflow.utils.state import State,
TaskInstanceState from airflow.utils.types import DagRunType -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_pools, clear_db_runs + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_pools, clear_db_runs if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/cli/commands/test_variable_command.py b/tests/cli/commands/test_variable_command.py index f95e7b1648277..d35184c73c80a 100644 --- a/tests/cli/commands/test_variable_command.py +++ b/tests/cli/commands/test_variable_command.py @@ -29,7 +29,8 @@ from airflow.cli.commands import variable_command from airflow.models import Variable from airflow.utils.session import create_session -from tests.test_utils.db import clear_db_variables + +from dev.tests_common.test_utils.db import clear_db_variables pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_webserver_command.py b/tests/cli/commands/test_webserver_command.py index 5531f674689b9..b72f4e4596035 100644 --- a/tests/cli/commands/test_webserver_command.py +++ b/tests/cli/commands/test_webserver_command.py @@ -31,7 +31,8 @@ from airflow.cli.commands import webserver_command from airflow.cli.commands.webserver_command import GunicornMonitor from tests.cli.commands._common_cli_classes import _CommonCLIGunicornTestClass -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars console = Console(width=400, color_system="standard") diff --git a/tests/cli/conftest.py b/tests/cli/conftest.py index 9f0a63af06978..61e27b0e74de4 100644 --- a/tests/cli/conftest.py +++ b/tests/cli/conftest.py @@ -21,12 +21,12 @@ import pytest -from airflow.cli import cli_parser from airflow.executors import local_executor from airflow.models.dagbag import DagBag from airflow.providers.celery.executors import celery_executor, celery_kubernetes_executor from airflow.providers.cncf.kubernetes.executors import kubernetes_executor, local_kubernetes_executor -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars # Create custom executors here because conftest is imported first custom_executor_module = type(sys)("custom_executor") @@ -61,4 +61,6 @@ def dagbag(): @pytest.fixture(scope="session") def parser(): + from airflow.cli import cli_parser + return cli_parser.get_parser() diff --git a/tests/cli/test_cli_parser.py b/tests/cli/test_cli_parser.py index 2244b6dbd5860..46ac36803face 100644 --- a/tests/cli/test_cli_parser.py +++ b/tests/cli/test_cli_parser.py @@ -43,7 +43,8 @@ from airflow.executors.local_executor import LocalExecutor from airflow.providers.amazon.aws.executors.ecs.ecs_executor import AwsEcsExecutor from airflow.providers.celery.executors.celery_executor import CeleryExecutor -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/conftest.py b/tests/conftest.py index 60d009416fe8e..f956865e7a1c6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -16,126 +16,33 @@ # under the License. 
from __future__ import annotations -import json import os -import platform -import re -import subprocess import sys -from contextlib import ExitStack, suppress -from datetime import datetime, timedelta, timezone -from pathlib import Path from typing import TYPE_CHECKING import pytest -import time_machine -from itsdangerous import URLSafeSerializer - -if TYPE_CHECKING: - from tests._internals.capture_warnings import CaptureWarningsPlugin # noqa: F401 - from tests._internals.forbidden_warnings import ForbiddenWarningsPlugin # noqa: F401 # We should set these before loading _any_ of the rest of airflow so that the # unit test mode config is set as early as possible. assert "airflow" not in sys.modules, "No airflow module can be imported before these lines" -keep_env_variables = "--keep-env-variables" in sys.argv - -if not keep_env_variables: - # Clear all Environment Variables that might have side effect, - # For example, defined in /files/airflow-breeze-config/variables.env - _AIRFLOW_CONFIG_PATTERN = re.compile(r"^AIRFLOW__(.+)__(.+)$") - _KEEP_CONFIGS_SETTINGS: dict[str, dict[str, set[str]]] = { - # Keep always these configurations - "always": { - "database": {"sql_alchemy_conn"}, - "core": {"sql_alchemy_conn"}, - "celery": {"result_backend", "broker_url"}, - }, - # Keep per enabled integrations - "celery": {"celery": {"*"}, "celery_broker_transport_options": {"*"}}, - "kerberos": {"kerberos": {"*"}}, - } - if os.environ.get("RUN_TESTS_WITH_DATABASE_ISOLATION", "false").lower() == "true": - _KEEP_CONFIGS_SETTINGS["always"].update( - { - "core": { - "internal_api_url", - "fernet_key", - "database_access_isolation", - "internal_api_secret_key", - "internal_api_clock_grace", - }, - } - ) - _ENABLED_INTEGRATIONS = {e.split("_", 1)[-1].lower() for e in os.environ if e.startswith("INTEGRATION_")} - _KEEP_CONFIGS: dict[str, set[str]] = {} - for keep_settings_key in ("always", *_ENABLED_INTEGRATIONS): - if keep_settings := _KEEP_CONFIGS_SETTINGS.get(keep_settings_key): - for section, options in keep_settings.items(): - if section not in _KEEP_CONFIGS: - _KEEP_CONFIGS[section] = options - else: - _KEEP_CONFIGS[section].update(options) - for env_key in os.environ.copy(): - if m := _AIRFLOW_CONFIG_PATTERN.match(env_key): - section, option = m.group(1).lower(), m.group(2).lower() - if not (ko := _KEEP_CONFIGS.get(section)) or not ("*" in ko or option in ko): - del os.environ[env_key] - -SUPPORTED_DB_BACKENDS = ("sqlite", "postgres", "mysql") - -# A bit of a Hack - but we need to check args before they are parsed by pytest in order to -# configure the DB before Airflow gets initialized (which happens at airflow import time). 
-# Using env variables also handles the case, when python-xdist is used - python-xdist spawns separate -# processes and does not pass all args to them (it's done via env variables) so we are doing the -# same here and detect whether `--skip-db-tests` or `--run-db-tests-only` is passed to pytest -# and set env variables so the processes spawned by python-xdist can read the status from there -skip_db_tests = "--skip-db-tests" in sys.argv or os.environ.get("_AIRFLOW_SKIP_DB_TESTS") == "true" -run_db_tests_only = ( - "--run-db-tests-only" in sys.argv or os.environ.get("_AIRFLOW_RUN_DB_TESTS_ONLY") == "true" -) - -if skip_db_tests: - if run_db_tests_only: - raise Exception("You cannot specify both --skip-db-tests and --run-db-tests-only together") - # Make sure sqlalchemy will not be usable for pure unit tests even if initialized - os.environ["AIRFLOW__CORE__SQL_ALCHEMY_CONN"] = "bad_schema:///" - os.environ["AIRFLOW__DATABASE__SQL_ALCHEMY_CONN"] = "bad_schema:///" - os.environ["_IN_UNIT_TESTS"] = "true" - # Set it here to pass the flag to python-xdist spawned processes - os.environ["_AIRFLOW_SKIP_DB_TESTS"] = "true" - -if run_db_tests_only: - # Set it here to pass the flag to python-xdist spawned processes - os.environ["_AIRFLOW_RUN_DB_TESTS_ONLY"] = "true" - -AIRFLOW_TESTS_DIR = Path(os.path.dirname(os.path.realpath(__file__))).resolve() -AIRFLOW_SOURCES_ROOT_DIR = AIRFLOW_TESTS_DIR.parent.parent - -os.environ["AIRFLOW__CORE__PLUGINS_FOLDER"] = os.fspath(AIRFLOW_TESTS_DIR / "plugins") -os.environ["AIRFLOW__CORE__DAGS_FOLDER"] = os.fspath(AIRFLOW_TESTS_DIR / "dags") -os.environ["AIRFLOW__CORE__UNIT_TEST_MODE"] = "True" -os.environ["AWS_DEFAULT_REGION"] = os.environ.get("AWS_DEFAULT_REGION") or "us-east-1" -os.environ["CREDENTIALS_DIR"] = os.environ.get("CREDENTIALS_DIR") or "/files/airflow-breeze-config/keys" -os.environ["AIRFLOW_ENABLE_AIP_44"] = os.environ.get("AIRFLOW_ENABLE_AIP_44") or "true" - -if platform.system() == "Darwin": - # mocks from unittest.mock work correctly in subprocesses only if they are created by "fork" method - # but macOS uses "spawn" by default - os.environ["AIRFLOW__CORE__MP_START_METHOD"] = "fork" +pytest_plugins = "dev.tests_common.pytest_plugin" # Ignore files that are really test dags to be ignored by pytest collect_ignore = [ "tests/dags/subdir1/test_ignore_this.py", "tests/dags/test_invalid_dup_task.py", "tests/dags_corrupted/test_impersonation_custom.py", + "dev/tests_common/test_utils/perf/dags/elastic_dag.py", ] -# https://docs.pytest.org/en/stable/reference/reference.html#stash -capture_warnings_key = pytest.StashKey["CaptureWarningsPlugin"]() -forbidden_warnings_key = pytest.StashKey["ForbiddenWarningsPlugin"]() + +@pytest.hookimpl(tryfirst=True) +def pytest_configure(config: pytest.Config) -> None: + dep_path = [config.rootpath.joinpath("tests", "deprecations_ignore.yml")] + config.inicfg["airflow_deprecations_ignore"] = ( + config.inicfg.get("airflow_deprecations_ignore", []) + dep_path # type: ignore[assignment,operator] + ) @pytest.fixture @@ -151,1308 +58,6 @@ def reset_environment(): os.environ[key] = init_env[key] -@pytest.fixture -def secret_key() -> str: - """Return secret key configured.""" - from airflow.configuration import conf - - the_key = conf.get("webserver", "SECRET_KEY") - if the_key is None: - raise RuntimeError( - "The secret key SHOULD be configured as `[webserver] secret_key` in the " - "configuration/environment at this stage! 
" - ) - return the_key - - -@pytest.fixture -def url_safe_serializer(secret_key) -> URLSafeSerializer: - return URLSafeSerializer(secret_key) - - -@pytest.fixture -def reset_db(): - """Resets Airflow db.""" - - from airflow.utils import db - - db.resetdb() - - -ALLOWED_TRACE_SQL_COLUMNS = ["num", "time", "trace", "sql", "parameters", "count"] - - -@pytest.fixture(autouse=True) -def trace_sql(request): - from tests.test_utils.perf.perf_kit.sqlalchemy import ( # isort: skip - count_queries, - trace_queries, - ) - - """Displays queries from the tests to console.""" - trace_sql_option = request.config.option.trace_sql - if not trace_sql_option: - yield - return - - terminal_reporter = request.config.pluginmanager.getplugin("terminalreporter") - # if no terminal reporter plugin is present, nothing we can do here; - # this can happen when this function executes in a worker node - # when using pytest-xdist, for example - if terminal_reporter is None: - yield - return - - columns = [col.strip() for col in trace_sql_option.split(",")] - - def pytest_print(text): - return terminal_reporter.write_line(text) - - with ExitStack() as exit_stack: - if columns == ["num"]: - # It is very unlikely that the user wants to display only numbers, but probably - # the user just wants to count the queries. - exit_stack.enter_context(count_queries(print_fn=pytest_print)) - elif any(c in columns for c in ["time", "trace", "sql", "parameters"]): - exit_stack.enter_context( - trace_queries( - display_num="num" in columns, - display_time="time" in columns, - display_trace="trace" in columns, - display_sql="sql" in columns, - display_parameters="parameters" in columns, - print_fn=pytest_print, - ) - ) - - yield - - -@pytest.fixture(autouse=True, scope="session") -def set_db_isolation_mode(): - if os.environ.get("RUN_TESTS_WITH_DATABASE_ISOLATION", "false").lower() == "true": - from airflow.api_internal.internal_api_call import InternalApiConfig - - InternalApiConfig.set_use_internal_api("tests", allow_tests_to_use_db=True) - - -def skip_if_database_isolation_mode(item): - if os.environ.get("RUN_TESTS_WITH_DATABASE_ISOLATION", "false").lower() == "true": - for _ in item.iter_markers(name="skip_if_database_isolation_mode"): - pytest.skip("This test is skipped because it is not allowed in database isolation mode.") - - -def pytest_addoption(parser: pytest.Parser): - """Add options parser for custom plugins.""" - group = parser.getgroup("airflow") - group.addoption( - "--with-db-init", - action="store_true", - dest="db_init", - help="Forces database initialization before tests", - ) - group.addoption( - "--integration", - action="append", - dest="integration", - metavar="INTEGRATIONS", - help="only run tests matching integration specified: " - "[cassandra,kerberos,mongo,celery,statsd,trino]. 
", - ) - group.addoption( - "--keep-env-variables", - action="store_true", - dest="keep_env_variables", - help="do not clear environment variables that might have side effect while running tests", - ) - group.addoption( - "--skip-db-tests", - action="store_true", - dest="skip_db_tests", - help="skip tests that require database", - ) - group.addoption( - "--run-db-tests-only", - action="store_true", - dest="run_db_tests_only", - help="only run tests requiring database", - ) - group.addoption( - "--backend", - action="store", - dest="backend", - metavar="BACKEND", - help="only run tests matching the backend: [sqlite,postgres,mysql].", - ) - group.addoption( - "--system", - action="append", - dest="system", - metavar="SYSTEMS", - help="only run tests matching the system specified [google.cloud, google.marketing_platform]", - ) - group.addoption( - "--include-long-running", - action="store_true", - dest="include_long_running", - help="Includes long running tests (marked with long_running marker). They are skipped by default.", - ) - group.addoption( - "--include-quarantined", - action="store_true", - dest="include_quarantined", - help="Includes quarantined tests (marked with quarantined marker). They are skipped by default.", - ) - group.addoption( - "--exclude-virtualenv-operator", - action="store_true", - dest="exclude_virtualenv_operator", - help="Excludes virtualenv operators tests (marked with virtualenv_test marker).", - ) - group.addoption( - "--exclude-external-python-operator", - action="store_true", - dest="exclude_external_python_operator", - help="Excludes external python operator tests (marked with external_python_test marker).", - ) - allowed_trace_sql_columns_list = ",".join(ALLOWED_TRACE_SQL_COLUMNS) - group.addoption( - "--trace-sql", - action="store", - dest="trace_sql", - help=( - "Trace SQL statements. As an argument, you must specify the columns to be " - f"displayed as a comma-separated list. Supported values: [f{allowed_trace_sql_columns_list}]" - ), - metavar="COLUMNS", - ) - group.addoption( - "--no-db-cleanup", - action="store_false", - dest="db_cleanup", - help="Disable DB clear before each test module.", - ) - group.addoption( - "--disable-forbidden-warnings", - action="store_true", - dest="disable_forbidden_warnings", - help="Disable raising an error if forbidden warnings detected.", - ) - group.addoption( - "--disable-capture-warnings", - action="store_true", - dest="disable_capture_warnings", - help="Disable internal capture warnings.", - ) - group.addoption( - "--warning-output-path", - action="store", - dest="warning_output_path", - metavar="PATH", - help=( - "Path for resulting captured warnings. Absolute or relative to the `tests` directory. " - "If not provided or environment variable `CAPTURE_WARNINGS_OUTPUT` not set " - "then 'warnings.txt' will be used." 
- ), - ) - parser.addini( - name="forbidden_warnings", - type="linelist", - help="List of internal Airflow warnings which are prohibited during tests execution.", - ) - - -def initial_db_init(): - from flask import Flask - - from airflow.configuration import conf - from airflow.utils import db - from airflow.www.extensions.init_appbuilder import init_appbuilder - from airflow.www.extensions.init_auth_manager import get_auth_manager - from tests.test_utils.compat import AIRFLOW_V_2_8_PLUS - - db.resetdb() - db.bootstrap_dagbag() - # minimal app to add roles - flask_app = Flask(__name__) - flask_app.config["SQLALCHEMY_DATABASE_URI"] = conf.get("database", "SQL_ALCHEMY_CONN") - init_appbuilder(flask_app) - if AIRFLOW_V_2_8_PLUS: - get_auth_manager().init() - - -@pytest.fixture(autouse=True, scope="session") -def initialize_airflow_tests(request): - """Helper that setups Airflow testing environment.""" - print(" AIRFLOW ".center(60, "=")) - - # Setup test environment for breeze - home = os.path.expanduser("~") - airflow_home = os.environ.get("AIRFLOW_HOME") or os.path.join(home, "airflow") - - print(f"Home of the user: {home}\nAirflow home {airflow_home}") - - # Initialize Airflow db if required - lock_file = os.path.join(airflow_home, ".airflow_db_initialised") - if not skip_db_tests: - if request.config.option.db_init: - print("Initializing the DB - forced with --with-db-init switch.") - initial_db_init() - elif not os.path.exists(lock_file): - print( - "Initializing the DB - first time after entering the container.\n" - "You can force re-initialization the database by adding --with-db-init switch to run-tests." - ) - initial_db_init() - # Create pid file - with open(lock_file, "w+"): - pass - else: - print( - "Skipping initializing of the DB as it was initialized already.\n" - "You can re-initialize the database by adding --with-db-init flag when running tests." - ) - integration_kerberos = os.environ.get("INTEGRATION_KERBEROS") - if integration_kerberos == "true": - # Initialize kerberos - kerberos = os.environ.get("KRB5_KTNAME") - if kerberos: - subprocess.check_call(["kinit", "-kt", kerberos, "bob@EXAMPLE.COM"]) - else: - print("Kerberos enabled! Please setup KRB5_KTNAME environment variable") - sys.exit(1) - - -def pytest_configure(config: pytest.Config) -> None: - if (backend := config.getoption("backend", default=None)) and backend not in SUPPORTED_DB_BACKENDS: - msg = ( - f"Provided DB backend {backend!r} not supported, " - f"expected one of: {', '.join(map(repr, SUPPORTED_DB_BACKENDS))}" - ) - pytest.exit(msg, returncode=6) - - config.addinivalue_line("markers", "integration(name): mark test to run with named integration") - config.addinivalue_line("markers", "backend(name): mark test to run with named backend") - config.addinivalue_line("markers", "system(name): mark test to run with named system") - config.addinivalue_line("markers", "platform(name): mark test to run with specific platform/environment") - config.addinivalue_line("markers", "long_running: mark test that run for a long time (many minutes)") - config.addinivalue_line( - "markers", "quarantined: mark test that are in quarantine (i.e. 
flaky, need to be isolated and fixed)" - ) - config.addinivalue_line( - "markers", "credential_file(name): mark tests that require credential file in CREDENTIALS_DIR" - ) - config.addinivalue_line( - "markers", "need_serialized_dag: mark tests that require dags in serialized form to be present" - ) - config.addinivalue_line( - "markers", - "db_test: mark tests that require database to be present", - ) - config.addinivalue_line( - "markers", - "non_db_test_override: you can mark individual tests with this marker to override the db_test marker", - ) - config.addinivalue_line( - "markers", - "virtualenv_operator: virtualenv operator tests are 'long', we should run them separately", - ) - config.addinivalue_line( - "markers", - "external_python_operator: external python operator tests are 'long', we should run them separately", - ) - config.addinivalue_line("markers", "enable_redact: do not mock redact secret masker") - config.addinivalue_line("markers", "skip_if_database_isolation_mode: skip if DB isolation is enabled") - - os.environ["_AIRFLOW__SKIP_DATABASE_EXECUTOR_COMPATIBILITY_CHECK"] = "1" - - # Setup internal warnings plugins - if "ignore" in sys.warnoptions: - config.option.disable_forbidden_warnings = True - config.option.disable_capture_warnings = True - if not config.pluginmanager.get_plugin("warnings"): - # Internal forbidden warnings plugin depends on builtin pytest warnings plugin - config.option.disable_forbidden_warnings = True - - forbidden_warnings: list[str] | None = config.getini("forbidden_warnings") - if not config.option.disable_forbidden_warnings and forbidden_warnings: - from tests._internals.forbidden_warnings import ForbiddenWarningsPlugin - - forbidden_warnings_plugin = ForbiddenWarningsPlugin( - config=config, - forbidden_warnings=tuple(map(str.strip, forbidden_warnings)), - ) - config.pluginmanager.register(forbidden_warnings_plugin) - config.stash[forbidden_warnings_key] = forbidden_warnings_plugin - - if not config.option.disable_capture_warnings: - from tests._internals.capture_warnings import CaptureWarningsPlugin - - capture_warnings_plugin = CaptureWarningsPlugin( - config=config, output_path=config.getoption("warning_output_path", default=None) - ) - config.pluginmanager.register(capture_warnings_plugin) - config.stash[capture_warnings_key] = capture_warnings_plugin - - -def pytest_unconfigure(config: pytest.Config) -> None: - os.environ.pop("_AIRFLOW__SKIP_DATABASE_EXECUTOR_COMPATIBILITY_CHECK", None) - if forbidden_warnings_plugin := config.stash.get(forbidden_warnings_key, None): - del config.stash[forbidden_warnings_key] - config.pluginmanager.unregister(forbidden_warnings_plugin) - if capture_warnings_plugin := config.stash.get(capture_warnings_key, None): - del config.stash[capture_warnings_key] - config.pluginmanager.unregister(capture_warnings_plugin) - - -def skip_if_not_marked_with_integration(selected_integrations, item): - for marker in item.iter_markers(name="integration"): - integration_name = marker.args[0] - if integration_name in selected_integrations or "all" in selected_integrations: - return - pytest.skip( - f"The test is skipped because it does not have the right integration marker. " - f"Only tests marked with pytest.mark.integration(INTEGRATION) are run with INTEGRATION " - f"being one of {selected_integrations}. 
{item}" - ) - - -def skip_if_not_marked_with_backend(selected_backend, item): - for marker in item.iter_markers(name="backend"): - backend_names = marker.args - if selected_backend in backend_names: - return - pytest.skip( - f"The test is skipped because it does not have the right backend marker. " - f"Only tests marked with pytest.mark.backend('{selected_backend}') are run: {item}" - ) - - -def skip_if_platform_doesnt_match(marker): - allowed_platforms = ("linux", "breeze") - if not (args := marker.args): - pytest.fail(f"No platform specified, expected one of: {', '.join(map(repr, allowed_platforms))}") - elif not all(a in allowed_platforms for a in args): - pytest.fail( - f"Allowed platforms {', '.join(map(repr, allowed_platforms))}; " - f"but got: {', '.join(map(repr, args))}" - ) - if "linux" in args: - if not sys.platform.startswith("linux"): - pytest.skip("Test expected to run on Linux platform.") - if "breeze" in args: - if not os.path.isfile("/.dockerenv") or os.environ.get("BREEZE", "").lower() != "true": - raise pytest.skip( - "Test expected to run into Airflow Breeze container. " - "Maybe because it is to dangerous to run it outside." - ) - - -def skip_if_not_marked_with_system(selected_systems, item): - for marker in item.iter_markers(name="system"): - systems_name = marker.args[0] - if systems_name in selected_systems or "all" in selected_systems: - return - pytest.skip( - f"The test is skipped because it does not have the right system marker. " - f"Only tests marked with pytest.mark.system(SYSTEM) are run with SYSTEM " - f"being one of {selected_systems}. {item}" - ) - - -def skip_system_test(item): - for marker in item.iter_markers(name="system"): - pytest.skip( - f"The test is skipped because it has system marker. System tests are only run when " - f"--system flag with the right system ({marker.args[0]}) is passed to pytest. {item}" - ) - - -def skip_long_running_test(item): - for _ in item.iter_markers(name="long_running"): - pytest.skip( - f"The test is skipped because it has long_running marker. " - f"And --include-long-running flag is not passed to pytest. {item}" - ) - - -def skip_quarantined_test(item): - for _ in item.iter_markers(name="quarantined"): - pytest.skip( - f"The test is skipped because it has quarantined marker. " - f"And --include-quarantined flag is not passed to pytest. {item}" - ) - - -def skip_virtualenv_operator_test(item): - for _ in item.iter_markers(name="virtualenv_operator"): - pytest.skip( - f"The test is skipped because it has virtualenv_operator marker. " - f"And --exclude-virtualenv-operator flag is not passed to pytest. {item}" - ) - - -def skip_external_python_operator_test(item): - for _ in item.iter_markers(name="external_python_operator"): - pytest.skip( - f"The test is skipped because it has external_python_operator marker. " - f"And --exclude-external-python-operator flag is not passed to pytest. {item}" - ) - - -def skip_db_test(item): - if next(item.iter_markers(name="db_test"), None): - if next(item.iter_markers(name="non_db_test_override"), None): - # non_db_test can override the db_test set for example on module or class level - return - else: - pytest.skip( - f"The test is skipped as it is DB test " - f"and --skip-db-tests is flag is passed to pytest. {item}" - ) - if next(item.iter_markers(name="backend"), None): - # also automatically skip tests marked with `backend` marker as they are implicitly - # db tests - pytest.skip( - f"The test is skipped as it is DB test " - f"and --skip-db-tests is flag is passed to pytest. 
{item}" - ) - - -def only_run_db_test(item): - if next(item.iter_markers(name="db_test"), None) and not next( - item.iter_markers(name="non_db_test_override"), None - ): - # non_db_test at individual level can override the db_test set for example on module or class level - return - else: - if next(item.iter_markers(name="backend"), None): - # Also do not skip the tests marked with `backend` marker - as it is implicitly a db test - return - pytest.skip( - f"The test is skipped as it is not a DB tests " - f"and --run-db-tests-only flag is passed to pytest. {item}" - ) - - -def skip_if_integration_disabled(marker, item): - integration_name = marker.args[0] - environment_variable_name = "INTEGRATION_" + integration_name.upper() - environment_variable_value = os.environ.get(environment_variable_name) - if not environment_variable_value or environment_variable_value != "true": - pytest.skip( - f"The test requires {integration_name} integration started and " - f"{environment_variable_name} environment variable to be set to true (it is '{environment_variable_value}')." - f" It can be set by specifying '--integration {integration_name}' at breeze startup" - f": {item}" - ) - - -def skip_if_wrong_backend(marker: pytest.Mark, item: pytest.Item) -> None: - if not (backend_names := marker.args): - reason = ( - "`pytest.mark.backend` expect to get at least one of the following backends: " - f"{', '.join(map(repr, SUPPORTED_DB_BACKENDS))}." - ) - pytest.fail(reason) - elif unsupported_backends := list(filter(lambda b: b not in SUPPORTED_DB_BACKENDS, backend_names)): - reason = ( - "Airflow Tests supports only the following backends in `pytest.mark.backend` marker: " - f"{', '.join(map(repr, SUPPORTED_DB_BACKENDS))}, " - f"but got {', '.join(map(repr, unsupported_backends))}." - ) - pytest.fail(reason) - - env_name = "BACKEND" - if not (backend := os.environ.get(env_name)) or backend not in backend_names: - reason = ( - f"The test {item.nodeid!r} requires one of {', '.join(map(repr, backend_names))} backend started " - f"and {env_name!r} environment variable to be set (currently it set to {backend!r}). " - f"It can be set by specifying backend at breeze startup." 
- ) - pytest.skip(reason) - - -def skip_if_credential_file_missing(item): - for marker in item.iter_markers(name="credential_file"): - credential_file = marker.args[0] - credential_path = os.path.join(os.environ.get("CREDENTIALS_DIR"), credential_file) - if not os.path.exists(credential_path): - pytest.skip(f"The test requires credential file {credential_path}: {item}") - - -def pytest_runtest_setup(item): - selected_integrations_list = item.config.option.integration - selected_systems_list = item.config.option.system - - include_long_running = item.config.option.include_long_running - include_quarantined = item.config.option.include_quarantined - exclude_virtualenv_operator = item.config.option.exclude_virtualenv_operator - exclude_external_python_operator = item.config.option.exclude_external_python_operator - - for marker in item.iter_markers(name="integration"): - skip_if_integration_disabled(marker, item) - if selected_integrations_list: - skip_if_not_marked_with_integration(selected_integrations_list, item) - if selected_systems_list: - skip_if_not_marked_with_system(selected_systems_list, item) - else: - skip_system_test(item) - for marker in item.iter_markers(name="platform"): - skip_if_platform_doesnt_match(marker) - for marker in item.iter_markers(name="backend"): - skip_if_wrong_backend(marker, item) - skip_if_database_isolation_mode(item) - selected_backend = item.config.option.backend - if selected_backend: - skip_if_not_marked_with_backend(selected_backend, item) - if not include_long_running: - skip_long_running_test(item) - if not include_quarantined: - skip_quarantined_test(item) - if exclude_virtualenv_operator: - skip_virtualenv_operator_test(item) - if exclude_external_python_operator: - skip_external_python_operator_test(item) - if skip_db_tests: - skip_db_test(item) - if run_db_tests_only: - only_run_db_test(item) - skip_if_credential_file_missing(item) - - -@pytest.fixture -def frozen_sleep(monkeypatch): - """Use time-machine to "stub" sleep. - - This means the ``sleep()`` takes no time, but ``datetime.now()`` appears to move forwards. - - If your module under test does ``import time`` and then ``time.sleep``: - - .. code-block:: python - - def test_something(frozen_sleep): - my_mod.fn_under_test() - - If your module under test does ``from time import sleep`` then you will - have to mock that sleep function directly: - - .. code-block:: python - - def test_something(frozen_sleep, monkeypatch): - monkeypatch.setattr("my_mod.sleep", frozen_sleep) - my_mod.fn_under_test() - """ - traveller = None - - def fake_sleep(seconds): - nonlocal traveller - utcnow = datetime.now(tz=timezone.utc) - if traveller is not None: - traveller.stop() - traveller = time_machine.travel(utcnow + timedelta(seconds=seconds)) - traveller.start() - - monkeypatch.setattr("time.sleep", fake_sleep) - yield fake_sleep - - if traveller is not None: - traveller.stop() - - -@pytest.fixture(scope="session") -def app(): - from tests.test_utils.config import conf_vars - - with conf_vars({("fab", "auth_rate_limited"): "False"}): - from airflow.www import app - - yield app.create_app(testing=True) - - -@pytest.fixture -def dag_maker(request): - """Fixture to help create DAG, DagModel, and SerializedDAG automatically. 
- - You have to use the dag_maker as a context manager and it takes - the same argument as DAG:: - - with dag_maker(dag_id="mydag") as dag: - task1 = EmptyOperator(task_id="mytask") - task2 = EmptyOperator(task_id="mytask2") - - If the DagModel you want to use needs different parameters than the one - automatically created by the dag_maker, you have to update the DagModel as below:: - - dag_maker.dag_model.is_active = False - session.merge(dag_maker.dag_model) - session.commit() - - For any test you use the dag_maker, make sure to create a DagRun:: - - dag_maker.create_dagrun() - - The dag_maker.create_dagrun takes the same arguments as dag.create_dagrun - - If you want to operate on serialized DAGs, then either pass - ``serialized=True`` to the ``dag_maker()`` call, or you can mark your - test/class/file with ``@pytest.mark.need_serialized_dag(True)``. In both of - these cases the ``dag`` returned by the context manager will be a - lazily-evaluated proxy object to the SerializedDAG. - """ - import lazy_object_proxy - - # IMPORTANT: Delay _all_ imports from `airflow.*` to _inside a method_. - # This fixture is "called" early on in the pytest collection process, and - # if we import airflow.* here the wrong (non-test) config will be loaded - # and "baked" in to various constants - - want_serialized = False - - # Allow changing default serialized behaviour with `@pytest.mark.need_serialized_dag` or - # `@pytest.mark.need_serialized_dag(False)` - serialized_marker = request.node.get_closest_marker("need_serialized_dag") - if serialized_marker: - (want_serialized,) = serialized_marker.args or (True,) - - from airflow.utils.log.logging_mixin import LoggingMixin - - class DagFactory(LoggingMixin): - _own_session = False - - def __init__(self): - from airflow.models import DagBag - - # Keep all the serialized dags we've created in this test - self.dagbag = DagBag(os.devnull, include_examples=False, read_dags_from_db=False) - - def __enter__(self): - self.dag.__enter__() - if self.want_serialized: - return lazy_object_proxy.Proxy(self._serialized_dag) - return self.dag - - def _serialized_dag(self): - return self.serialized_model.dag - - def get_serialized_data(self): - try: - data = self.serialized_model.data - except AttributeError: - raise RuntimeError("DAG serialization not requested") - if isinstance(data, str): - return json.loads(data) - return data - - def _bag_dag_compat(self, dag): - # This is a compatibility shim for the old bag_dag method in Airflow <3.0 - # TODO: Remove this when we drop support for Airflow <3.0 in Providers - if hasattr(dag, "parent_dag"): - return self.dagbag.bag_dag(dag, root_dag=dag) - return self.dagbag.bag_dag(dag) - - def __exit__(self, type, value, traceback): - from airflow.models import DagModel - from airflow.models.serialized_dag import SerializedDagModel - - dag = self.dag - dag.__exit__(type, value, traceback) - if type is not None: - return - - dag.clear(session=self.session) - dag.sync_to_db(processor_subdir=self.processor_subdir, session=self.session) - self.dag_model = self.session.get(DagModel, dag.dag_id) - - if self.want_serialized: - self.serialized_model = SerializedDagModel( - dag, processor_subdir=self.dag_model.processor_subdir - ) - self.session.merge(self.serialized_model) - serialized_dag = self._serialized_dag() - self._bag_dag_compat(serialized_dag) - self.session.flush() - else: - self._bag_dag_compat(self.dag) - - def create_dagrun(self, **kwargs): - from airflow.utils import timezone - from airflow.utils.state import State - from 
airflow.utils.types import DagRunType - from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS - - if AIRFLOW_V_3_0_PLUS: - from airflow.utils.types import DagRunTriggeredByType - - dag = self.dag - kwargs = { - "state": State.RUNNING, - "start_date": self.start_date, - "session": self.session, - **kwargs, - } - # Need to provide run_id if the user does not either provide one - # explicitly, or pass run_type for inference in dag.create_dagrun(). - if "run_id" not in kwargs and "run_type" not in kwargs: - kwargs["run_id"] = "test" - - if "run_type" not in kwargs: - kwargs["run_type"] = DagRunType.from_run_id(kwargs["run_id"]) - if kwargs.get("execution_date") is None: - if kwargs["run_type"] == DagRunType.MANUAL: - kwargs["execution_date"] = self.start_date - else: - kwargs["execution_date"] = dag.next_dagrun_info(None).logical_date - if "data_interval" not in kwargs: - logical_date = timezone.coerce_datetime(kwargs["execution_date"]) - if kwargs["run_type"] == DagRunType.MANUAL: - data_interval = dag.timetable.infer_manual_data_interval(run_after=logical_date) - else: - data_interval = dag.infer_automated_data_interval(logical_date) - kwargs["data_interval"] = data_interval - if AIRFLOW_V_3_0_PLUS and "triggered_by" not in kwargs: - kwargs["triggered_by"] = DagRunTriggeredByType.TEST - - self.dag_run = dag.create_dagrun(**kwargs) - for ti in self.dag_run.task_instances: - ti.refresh_from_task(dag.get_task(ti.task_id)) - if self.want_serialized: - self.session.commit() - return self.dag_run - - def create_dagrun_after(self, dagrun, **kwargs): - next_info = self.dag.next_dagrun_info(self.dag.get_run_data_interval(dagrun)) - if next_info is None: - raise ValueError(f"cannot create run after {dagrun}") - return self.create_dagrun( - execution_date=next_info.logical_date, - data_interval=next_info.data_interval, - **kwargs, - ) - - def __call__( - self, - dag_id="test_dag", - schedule=timedelta(days=1), - serialized=want_serialized, - fileloc=None, - processor_subdir=None, - session=None, - **kwargs, - ): - from airflow import settings - from airflow.models.dag import DAG - from airflow.utils import timezone - - if session is None: - self._own_session = True - session = settings.Session() - - self.kwargs = kwargs - self.session = session - self.start_date = self.kwargs.get("start_date", None) - default_args = kwargs.get("default_args", None) - if default_args and not self.start_date: - if "start_date" in default_args: - self.start_date = default_args.get("start_date") - if not self.start_date: - if hasattr(request.module, "DEFAULT_DATE"): - self.start_date = getattr(request.module, "DEFAULT_DATE") - else: - DEFAULT_DATE = timezone.datetime(2016, 1, 1) - self.start_date = DEFAULT_DATE - self.kwargs["start_date"] = self.start_date - # Set schedule argument to explicitly set value, or a default if no - # other scheduling arguments are set. 
- self.dag = DAG(dag_id, schedule=schedule, **self.kwargs) - self.dag.fileloc = fileloc or request.module.__file__ - self.want_serialized = serialized - self.processor_subdir = processor_subdir - - return self - - def cleanup(self): - from airflow.models import DagModel, DagRun, TaskInstance, XCom - from airflow.models.serialized_dag import SerializedDagModel - from airflow.models.taskmap import TaskMap - from airflow.utils.retries import run_with_db_retries - from tests.test_utils.compat import AssetEvent - - for attempt in run_with_db_retries(logger=self.log): - with attempt: - dag_ids = list(self.dagbag.dag_ids) - if not dag_ids: - return - # To isolate problems here with problems from elsewhere on the session object - self.session.rollback() - - self.session.query(SerializedDagModel).filter( - SerializedDagModel.dag_id.in_(dag_ids) - ).delete(synchronize_session=False) - self.session.query(DagRun).filter(DagRun.dag_id.in_(dag_ids)).delete( - synchronize_session=False, - ) - self.session.query(TaskInstance).filter(TaskInstance.dag_id.in_(dag_ids)).delete( - synchronize_session=False, - ) - self.session.query(XCom).filter(XCom.dag_id.in_(dag_ids)).delete( - synchronize_session=False, - ) - self.session.query(DagModel).filter(DagModel.dag_id.in_(dag_ids)).delete( - synchronize_session=False, - ) - self.session.query(TaskMap).filter(TaskMap.dag_id.in_(dag_ids)).delete( - synchronize_session=False, - ) - self.session.query(AssetEvent).filter(AssetEvent.source_dag_id.in_(dag_ids)).delete( - synchronize_session=False, - ) - self.session.commit() - if self._own_session: - self.session.expunge_all() - - factory = DagFactory() - - try: - yield factory - finally: - factory.cleanup() - with suppress(AttributeError): - del factory.session - - -@pytest.fixture -def create_dummy_dag(dag_maker): - """Create a `DAG` with a single `EmptyOperator` task. - - DagRun and DagModel is also created. - - Apart from the already existing arguments, any other argument in kwargs - is passed to the DAG and not to the EmptyOperator task. - - If you have an argument that you want to pass to the EmptyOperator that - is not here, please use `default_args` so that the DAG will pass it to the - Task:: - - dag, task = create_dummy_dag(default_args={"start_date": timezone.datetime(2016, 1, 1)}) - - You cannot be able to alter the created DagRun or DagModel, use `dag_maker` fixture instead. 
- """ - from airflow.operators.empty import EmptyOperator - from airflow.utils.types import DagRunType - - def create_dag( - dag_id="dag", - task_id="op1", - task_display_name=None, - max_active_tis_per_dag=16, - max_active_tis_per_dagrun=None, - pool="default_pool", - executor_config=None, - trigger_rule="all_done", - on_success_callback=None, - on_execute_callback=None, - on_failure_callback=None, - on_retry_callback=None, - email=None, - with_dagrun_type=DagRunType.SCHEDULED, - **kwargs, - ): - op_kwargs = {} - from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS - - if AIRFLOW_V_2_9_PLUS: - op_kwargs["task_display_name"] = task_display_name - with dag_maker(dag_id, **kwargs) as dag: - op = EmptyOperator( - task_id=task_id, - max_active_tis_per_dag=max_active_tis_per_dag, - max_active_tis_per_dagrun=max_active_tis_per_dagrun, - executor_config=executor_config or {}, - on_success_callback=on_success_callback, - on_execute_callback=on_execute_callback, - on_failure_callback=on_failure_callback, - on_retry_callback=on_retry_callback, - email=email, - pool=pool, - trigger_rule=trigger_rule, - **op_kwargs, - ) - if with_dagrun_type is not None: - dag_maker.create_dagrun(run_type=with_dagrun_type) - return dag, op - - return create_dag - - -if TYPE_CHECKING: - from airflow.models.taskinstance import TaskInstance - - -@pytest.fixture -def create_task_instance(dag_maker, create_dummy_dag): - """Create a TaskInstance, and associated DB rows (DagRun, DagModel, etc). - - Uses ``create_dummy_dag`` to create the dag structure. - """ - from airflow.operators.empty import EmptyOperator - - def maker( - execution_date=None, - dagrun_state=None, - state=None, - run_id=None, - run_type=None, - data_interval=None, - external_executor_id=None, - dag_id="dag", - task_id="op1", - task_display_name=None, - max_active_tis_per_dag=16, - max_active_tis_per_dagrun=None, - pool="default_pool", - executor_config=None, - trigger_rule="all_done", - on_success_callback=None, - on_execute_callback=None, - on_failure_callback=None, - on_retry_callback=None, - email=None, - map_index=-1, - **kwargs, - ) -> TaskInstance: - from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS - - if AIRFLOW_V_3_0_PLUS: - from airflow.utils.types import DagRunTriggeredByType - - if execution_date is None: - from airflow.utils import timezone - - execution_date = timezone.utcnow() - with dag_maker(dag_id, **kwargs): - op_kwargs = {} - from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS - - if AIRFLOW_V_2_9_PLUS: - op_kwargs["task_display_name"] = task_display_name - task = EmptyOperator( - task_id=task_id, - max_active_tis_per_dag=max_active_tis_per_dag, - max_active_tis_per_dagrun=max_active_tis_per_dagrun, - executor_config=executor_config or {}, - on_success_callback=on_success_callback, - on_execute_callback=on_execute_callback, - on_failure_callback=on_failure_callback, - on_retry_callback=on_retry_callback, - email=email, - pool=pool, - trigger_rule=trigger_rule, - **op_kwargs, - ) - - dagrun_kwargs = { - "execution_date": execution_date, - "state": dagrun_state, - } - dagrun_kwargs.update({"triggered_by": DagRunTriggeredByType.TEST} if AIRFLOW_V_3_0_PLUS else {}) - if run_id is not None: - dagrun_kwargs["run_id"] = run_id - if run_type is not None: - dagrun_kwargs["run_type"] = run_type - if data_interval is not None: - dagrun_kwargs["data_interval"] = data_interval - dagrun = dag_maker.create_dagrun(**dagrun_kwargs) - (ti,) = dagrun.task_instances - ti.task = task - ti.state = state - ti.external_executor_id = 
external_executor_id - ti.map_index = map_index - - dag_maker.session.flush() - return ti - - return maker - - -@pytest.fixture -def create_serialized_task_instance_of_operator(dag_maker): - def _create_task_instance( - operator_class, - *, - dag_id, - execution_date=None, - session=None, - **operator_kwargs, - ) -> TaskInstance: - with dag_maker(dag_id=dag_id, serialized=True, session=session): - operator_class(**operator_kwargs) - if execution_date is None: - dagrun_kwargs = {} - else: - dagrun_kwargs = {"execution_date": execution_date} - (ti,) = dag_maker.create_dagrun(**dagrun_kwargs).task_instances - return ti - - return _create_task_instance - - -@pytest.fixture -def create_task_instance_of_operator(dag_maker): - def _create_task_instance( - operator_class, - *, - dag_id, - execution_date=None, - session=None, - **operator_kwargs, - ) -> TaskInstance: - with dag_maker(dag_id=dag_id, session=session, serialized=True): - operator_class(**operator_kwargs) - if execution_date is None: - dagrun_kwargs = {} - else: - dagrun_kwargs = {"execution_date": execution_date} - (ti,) = dag_maker.create_dagrun(**dagrun_kwargs).task_instances - return ti - - return _create_task_instance - - -@pytest.fixture -def create_task_of_operator(dag_maker): - def _create_task_of_operator(operator_class, *, dag_id, session=None, **operator_kwargs): - with dag_maker(dag_id=dag_id, session=session): - task = operator_class(**operator_kwargs) - return task - - return _create_task_of_operator - - -@pytest.fixture -def session(): - from airflow.utils.session import create_session - - with create_session() as session: - yield session - session.rollback() - - -@pytest.fixture -def get_test_dag(): - def _get(dag_id): - from airflow.models.dagbag import DagBag - from airflow.models.serialized_dag import SerializedDagModel - - dag_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), "dags", f"{dag_id}.py") - dagbag = DagBag(dag_folder=dag_file, include_examples=False) - - dag = dagbag.get_dag(dag_id) - dag.sync_to_db() - SerializedDagModel.write_dag(dag) - - return dag - - return _get - - -@pytest.fixture -def create_log_template(request): - from airflow import settings - from airflow.models.tasklog import LogTemplate - - session = settings.Session() - - def _create_log_template(filename_template, elasticsearch_id=""): - log_template = LogTemplate(filename=filename_template, elasticsearch_id=elasticsearch_id) - session.add(log_template) - session.commit() - - def _delete_log_template(): - from airflow.models import DagRun, TaskInstance - - session.query(TaskInstance).delete() - session.query(DagRun).delete() - session.delete(log_template) - session.commit() - - request.addfinalizer(_delete_log_template) - - return _create_log_template - - -@pytest.fixture -def reset_logging_config(): - import logging.config - - from airflow import settings - from airflow.utils.module_loading import import_string - - logging_config = import_string(settings.LOGGING_CLASS_PATH) - logging.config.dictConfig(logging_config) - - -@pytest.fixture(scope="session", autouse=True) -def suppress_info_logs_for_dag_and_fab(): - import logging - - from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS - - dag_logger = logging.getLogger("airflow.models.dag") - dag_logger.setLevel(logging.WARNING) - - if AIRFLOW_V_2_9_PLUS: - fab_logger = logging.getLogger("airflow.providers.fab.auth_manager.security_manager.override") - fab_logger.setLevel(logging.WARNING) - else: - fab_logger = logging.getLogger("airflow.www.fab_security") - 
fab_logger.setLevel(logging.WARNING) - - -@pytest.fixture(scope="module", autouse=True) -def _clear_db(request): - """Clear DB before each test module run.""" - from tests.test_utils.db import clear_all - - if not request.config.option.db_cleanup: - return - if skip_db_tests: - return - from airflow.configuration import conf - - sql_alchemy_conn = conf.get("database", "sql_alchemy_conn") - if sql_alchemy_conn.startswith("sqlite"): - sql_alchemy_file = sql_alchemy_conn.replace("sqlite:///", "") - if not os.path.exists(sql_alchemy_file): - print(f"The sqlite file `{sql_alchemy_file}` does not exist. Attempt to initialize it.") - initial_db_init() - - dist_option = getattr(request.config.option, "dist", "no") - if dist_option != "no" or hasattr(request.config, "workerinput"): - # Skip if pytest-xdist detected (controller or worker) - return - try: - clear_all() - except Exception as ex: - exc_name_parts = [type(ex).__name__] - exc_module = type(ex).__module__ - if exc_module != "builtins": - exc_name_parts.insert(0, exc_module) - extra_msg = "" if request.config.option.db_init else ", try to run with flag --with-db-init" - pytest.exit(f"Unable clear test DB{extra_msg}, got error {'.'.join(exc_name_parts)}: {ex}") - - -@pytest.fixture(autouse=True) -def clear_lru_cache(): - from airflow.executors.executor_loader import ExecutorLoader - from airflow.utils.entry_points import _get_grouped_entry_points - - ExecutorLoader.validate_database_executor_compatibility.cache_clear() - try: - _get_grouped_entry_points.cache_clear() - try: - yield - finally: - _get_grouped_entry_points.cache_clear() - finally: - ExecutorLoader.validate_database_executor_compatibility.cache_clear() - - -@pytest.fixture(autouse=True) -def refuse_to_run_test_from_wrongly_named_files(request: pytest.FixtureRequest): - filepath = request.node.path - is_system_test: bool = "tests/system/" in os.fspath(filepath) - test_name = request.node.name - if request.node.cls: - test_name = f"{request.node.cls.__name__}.{test_name}" - if is_system_test and not filepath.name.startswith(("example_", "test_")): - pytest.fail( - f"All test method files in tests/system must start with 'example_' or 'test_'. " - f"Seems that {os.fspath(filepath)!r} contains {test_name!r} that looks like a test case. " - f"Please rename the file to follow the example_* or test_* pattern if you want to run the tests " - f"in it." - ) - elif not is_system_test and not filepath.name.startswith("test_"): - pytest.fail( - f"All test method files in tests/ must start with 'test_'. Seems that {os.fspath(filepath)!r} " - f"contains {test_name!r} that looks like a test case. Please rename the file to " - f"follow the test_* pattern if you want to run the tests in it." 
- ) - - -@pytest.fixture(autouse=True) -def initialize_providers_manager(): - from airflow.providers_manager import ProvidersManager - - ProvidersManager().initialize_providers_configuration() - - -@pytest.fixture(autouse=True) -def close_all_sqlalchemy_sessions(): - from sqlalchemy.orm import close_all_sessions - - with suppress(Exception): - close_all_sessions() - yield - with suppress(Exception): - close_all_sessions() - - -@pytest.fixture -def cleanup_providers_manager(): - from airflow.providers_manager import ProvidersManager - - ProvidersManager()._cleanup() - ProvidersManager().initialize_providers_configuration() - try: - yield - finally: - ProvidersManager()._cleanup() - - -@pytest.fixture(autouse=True) -def _disable_redact(request: pytest.FixtureRequest, mocker): - """Disable redacted text in tests, except specific.""" - from airflow import settings - - if next(request.node.iter_markers("enable_redact"), None): - with pytest.MonkeyPatch.context() as mp_ctx: - mp_ctx.setattr(settings, "MASK_SECRETS_IN_LOGS", True) - yield - return - - mocked_redact = mocker.patch("airflow.utils.log.secrets_masker.SecretsMasker.redact") - mocked_redact.side_effect = lambda item, name=None, max_depth=None: item - with pytest.MonkeyPatch.context() as mp_ctx: - mp_ctx.setattr(settings, "MASK_SECRETS_IN_LOGS", False) - yield - return - - -@pytest.fixture -def airflow_root_path() -> Path: - import airflow - - return Path(airflow.__path__[0]).parent - - -@pytest.fixture -def hook_lineage_collector(): - from airflow.lineage import hook - - hook._hook_lineage_collector = None - hook._hook_lineage_collector = hook.HookLineageCollector() - yield hook.get_hook_lineage_collector() - hook._hook_lineage_collector = None - - -# This constant is set to True if tests are run with Airflow installed from Packages rather than running -# the tests within Airflow sources. While most tests in CI are run using Airflow sources, there are -# also compatibility tests that only use `tests` package and run against installed packages of Airflow in -# for supported Airflow versions. -RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES = not (Path(__file__).parents[1] / "airflow" / "__init__.py").exists() - - if TYPE_CHECKING: # Static checkers do not know about pytest fixtures' types and return, # In case if them distributed through third party packages. @@ -1489,15 +94,3 @@ def requests_mock() -> RequestsMockFixture: ... # time-machine @pytest.fixture # type: ignore[no-redef] def time_machine() -> TimeMachineFixture: ... 
- - -@pytest.fixture -def clean_dags_and_dagruns(): - """Fixture that cleans the database before and after every test.""" - from tests.test_utils.db import clear_db_dags, clear_db_runs - - clear_db_runs() - clear_db_dags() - yield # Test runs here - clear_db_dags() - clear_db_runs() diff --git a/tests/core/test_configuration.py b/tests/core/test_configuration.py index 1590fb7ee231c..0690a5f0182dc 100644 --- a/tests/core/test_configuration.py +++ b/tests/core/test_configuration.py @@ -42,8 +42,6 @@ write_default_airflow_configuration_if_needed, ) from airflow.providers_manager import ProvidersManager -from tests.test_utils.config import conf_vars -from tests.test_utils.reset_warning_registry import reset_warning_registry from tests.utils.test_config import ( remove_all_configurations, set_deprecated_options, @@ -51,6 +49,9 @@ use_config, ) +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.reset_warning_registry import reset_warning_registry + HOME_DIR = os.path.expanduser("~") # The conf has been updated with sql_alchemy_con and deactivate_stale_dags_interval to test the diff --git a/tests/core/test_core.py b/tests/core/test_core.py index d44235f955243..13b85a9ef1700 100644 --- a/tests/core/test_core.py +++ b/tests/core/test_core.py @@ -32,7 +32,8 @@ from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_task_fail + +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_task_fail pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/core/test_example_dags_system.py b/tests/core/test_example_dags_system.py index 48a936f794f99..c60b7325b125d 100644 --- a/tests/core/test_example_dags_system.py +++ b/tests/core/test_example_dags_system.py @@ -19,7 +19,7 @@ import pytest -from tests.test_utils.system_tests_class import SystemTest +from dev.tests_common.test_utils.system_tests_class import SystemTest @pytest.mark.system("core") diff --git a/tests/core/test_impersonation_tests.py b/tests/core/test_impersonation_tests.py index 721d180ce7d3f..cf9359c863372 100644 --- a/tests/core/test_impersonation_tests.py +++ b/tests/core/test_impersonation_tests.py @@ -35,7 +35,8 @@ from airflow.utils.state import State from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType -from tests.test_utils import db + +from dev.tests_common.test_utils import db # The entire module into the quarantined mark, this might have unpredictable side effects to other tests # and should be moved into the isolated environment into the future. 
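The tests/conftest.py rewrite above collapses the per-repository bootstrap (env-var scrubbing, DB setup, marker registration, fixtures) into a single shared pytest plugin; the only logic kept locally is a `tryfirst` `pytest_configure` hook that appends `tests/deprecations_ignore.yml` to the `airflow_deprecations_ignore` ini option read by that plugin. A top-level conftest.py of another suite could hook in the same way — a minimal sketch mirroring the hunk above, where the extra YAML filename is a hypothetical, suite-specific ignore list:

.. code-block:: python

    from __future__ import annotations

    import pytest

    # Pull in the shared test harness exactly as tests/conftest.py now does.
    pytest_plugins = "dev.tests_common.pytest_plugin"


    @pytest.hookimpl(tryfirst=True)
    def pytest_configure(config: pytest.Config) -> None:
        # Extend the ini option consumed by the shared plugin;
        # "my_ignores.yml" is a made-up placeholder.
        extra = [config.rootpath.joinpath("tests", "my_ignores.yml")]
        config.inicfg["airflow_deprecations_ignore"] = (
            config.inicfg.get("airflow_deprecations_ignore", []) + extra
        )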
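The remaining hunks in these test files are the same one-line migration repeated: helpers such as `conf_vars`, `env_vars`, and the `clear_db_*` functions move from `tests.test_utils` to `dev.tests_common.test_utils`, with call sites untouched. A short sketch of typical usage under the new import path, assuming `conf_vars` keeps its `(section, option) -> value` context-manager semantics; the test name and config value here are illustrative:

.. code-block:: python

    from dev.tests_common.test_utils.config import conf_vars


    def test_parsing_processes_override():
        # conf_vars patches the given options for the duration of the
        # block and restores the previous configuration on exit.
        with conf_vars({("scheduler", "parsing_processes"): "4"}):
            from airflow.configuration import conf

            assert conf.getint("scheduler", "parsing_processes") == 4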
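The deleted `pytest_configure`/`pytest_runtest_setup` machinery registered the `integration`, `backend`, and `system` markers and skipped tests based on them; after this change that behaviour has to come from the shared plugin. Assuming the plugin re-registers the same markers, test modules keep opting in exactly as before — a sketch with an illustrative test body:

.. code-block:: python

    import pytest


    @pytest.mark.backend("postgres", "mysql")
    @pytest.mark.integration("kerberos")
    def test_requires_real_services():
        # Collected but skipped unless BACKEND is postgres/mysql and the
        # kerberos integration is enabled (e.g. `--integration kerberos`
        # at breeze startup).
        assert True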
diff --git a/tests/core/test_logging_config.py b/tests/core/test_logging_config.py
index c972532fe2ceb..a316130ecdabe 100644
--- a/tests/core/test_logging_config.py
+++ b/tests/core/test_logging_config.py
@@ -29,7 +29,8 @@
 import pytest
 
 from airflow.configuration import conf
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 SETTINGS_FILE_VALID = """
 LOGGING_CONFIG = {
diff --git a/tests/core/test_otel_tracer.py b/tests/core/test_otel_tracer.py
index db675dd4293ec..b9612c49dae92 100644
--- a/tests/core/test_otel_tracer.py
+++ b/tests/core/test_otel_tracer.py
@@ -26,7 +26,8 @@
 
 from airflow.traces import TRACEPARENT, TRACESTATE, otel_tracer, utils
 from airflow.traces.tracer import Trace
-from tests.test_utils.config import env_vars
+
+from dev.tests_common.test_utils.config import env_vars
 
 
 @pytest.fixture
diff --git a/tests/core/test_sentry.py b/tests/core/test_sentry.py
index b9a1ff7af8d7a..c67b49980aae2 100644
--- a/tests/core/test_sentry.py
+++ b/tests/core/test_sentry.py
@@ -30,7 +30,8 @@
 from airflow.utils import timezone
 from airflow.utils.module_loading import import_string
 from airflow.utils.state import State
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 EXECUTION_DATE = timezone.utcnow()
 SCHEDULE_INTERVAL = datetime.timedelta(days=1)
diff --git a/tests/core/test_settings.py b/tests/core/test_settings.py
index cdcf52df16d84..619d64630029d 100644
--- a/tests/core/test_settings.py
+++ b/tests/core/test_settings.py
@@ -33,7 +33,8 @@
 from airflow.exceptions import AirflowClusterPolicyViolation, AirflowConfigException
 from airflow.settings import _ENABLE_AIP_44, TracebackSession, is_usage_data_collection_enabled
 from airflow.utils.session import create_session
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 SETTINGS_FILE_POLICY = """
 def test_policy(task_instance):
diff --git a/tests/core/test_sqlalchemy_config.py b/tests/core/test_sqlalchemy_config.py
index 154eb0a5f8931..5fed8745fd085 100644
--- a/tests/core/test_sqlalchemy_config.py
+++ b/tests/core/test_sqlalchemy_config.py
@@ -25,7 +25,8 @@
 from airflow import settings
 from airflow.api_internal.internal_api_call import InternalApiConfig
 from airflow.exceptions import AirflowConfigException
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 SQL_ALCHEMY_CONNECT_ARGS = {"test": 43503, "dict": {"is": 1, "supported": "too"}}
diff --git a/tests/core/test_stats.py b/tests/core/test_stats.py
index ec7ff0cab08c5..9a218010a189e 100644
--- a/tests/core/test_stats.py
+++ b/tests/core/test_stats.py
@@ -36,7 +36,8 @@
     PatternAllowListValidator,
     PatternBlockListValidator,
 )
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 
 class CustomStatsd(statsd.StatsClient):
diff --git a/tests/dag_processing/test_job_runner.py b/tests/dag_processing/test_job_runner.py
index 1d3fefdf12d5f..81b94dd18b18a 100644
--- a/tests/dag_processing/test_job_runner.py
+++ b/tests/dag_processing/test_job_runner.py
@@ -60,9 +60,10 @@
 from airflow.utils.session import create_session
 from tests.core.test_logging_config import SETTINGS_FILE_VALID, settings_context
 from tests.models import TEST_DAGS_FOLDER
-from tests.test_utils.compat import ParseImportError
-from tests.test_utils.config import conf_vars
-from tests.test_utils.db import (
+
+from dev.tests_common.test_utils.compat import ParseImportError
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.db import (
     clear_db_callbacks,
     clear_db_dags,
     clear_db_import_errors,
diff --git a/tests/dag_processing/test_processor.py b/tests/dag_processing/test_processor.py
index d7b2b2116653e..c3a0f5a26e329 100644
--- a/tests/dag_processing/test_processor.py
+++ b/tests/dag_processing/test_processor.py
@@ -39,10 +39,11 @@
 from airflow.utils.session import create_session
 from airflow.utils.state import State
 from airflow.utils.types import DagRunType
-from tests.test_utils.asserts import assert_queries_count
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS, ParseImportError
-from tests.test_utils.config import conf_vars, env_vars
-from tests.test_utils.db import (
+
+from dev.tests_common.test_utils.asserts import assert_queries_count
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, ParseImportError
+from dev.tests_common.test_utils.config import conf_vars, env_vars
+from dev.tests_common.test_utils.db import (
     clear_db_dags,
     clear_db_import_errors,
     clear_db_jobs,
@@ -50,7 +51,7 @@
     clear_db_runs,
     clear_db_serialized_dags,
 )
-from tests.test_utils.mock_executor import MockExecutor
+from dev.tests_common.test_utils.mock_executor import MockExecutor
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/dags/test_miscellaneous.py b/tests/dags/test_miscellaneous.py
index 4a2c6b56a365d..b08e61e3bbddc 100644
--- a/tests/dags/test_miscellaneous.py
+++ b/tests/dags/test_miscellaneous.py
@@ -23,7 +23,8 @@
 
 from airflow.models.dag import DAG
 from airflow.operators.empty import EmptyOperator
-from tests.test_utils.compat import BashOperator
+
+from dev.tests_common.test_utils.compat import BashOperator
 
 args = {
     "owner": "airflow",
diff --git a/tests/dags/test_sensor.py b/tests/dags/test_sensor.py
index 07c9cc7efdffb..7c96160402663 100644
--- a/tests/dags/test_sensor.py
+++ b/tests/dags/test_sensor.py
@@ -21,7 +21,8 @@
 from airflow.decorators import task
 from airflow.models.dag import DAG
 from airflow.utils import timezone
-from tests.test_utils.compat import DateTimeSensor
+
+from dev.tests_common.test_utils.compat import DateTimeSensor
 
 with DAG(
     dag_id="test_sensor", start_date=datetime.datetime(2022, 1, 1), catchup=False, schedule="@once"
diff --git a/tests/decorators/test_bash.py b/tests/decorators/test_bash.py
index ba8948936eda1..da79fb4cca0fb 100644
--- a/tests/decorators/test_bash.py
+++ b/tests/decorators/test_bash.py
@@ -29,7 +29,8 @@
 from airflow.models.renderedtifields import RenderedTaskInstanceFields
 from airflow.utils import timezone
 from airflow.utils.types import NOTSET
-from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_rendered_ti_fields
+
+from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_rendered_ti_fields
 
 DEFAULT_DATE = timezone.datetime(2023, 1, 1)
diff --git a/tests/decorators/test_python.py b/tests/decorators/test_python.py
index adbf96a0f41ba..d34fbbf552b30 100644
--- a/tests/decorators/test_python.py
+++ b/tests/decorators/test_python.py
@@ -40,7 +40,8 @@
 from airflow.utils.types import DagRunType
 from airflow.utils.xcom import XCOM_RETURN_KEY
 from tests.operators.test_python import BasePythonTest
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/deprecations_ignore.yml b/tests/deprecations_ignore.yml
index 8be939227d814..c3d5ddd8eafa8 100644
--- a/tests/deprecations_ignore.yml
+++ b/tests/deprecations_ignore.yml
@@ -51,116 +51,3 @@
 - tests/www/views/test_views_rendered.py::test_rendered_task_detail_env_secret
 - tests/www/views/test_views_tasks.py::test_rendered_task_view
 - tests/www/views/test_views_tasks.py::test_views_get
-
-
-# Providers
-- tests/providers/amazon/aws/deferrable/hooks/test_base_aws.py::TestAwsBaseAsyncHook::test_get_client_async
-- tests/providers/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_cluster_status
-- tests/providers/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_get_cluster_status
-- tests/providers/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_get_cluster_status_exception
-- tests/providers/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_pause_cluster
-- tests/providers/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_resume_cluster
-- tests/providers/amazon/aws/deferrable/hooks/test_redshift_cluster.py::TestRedshiftAsyncHook::test_resume_cluster_exception
-- tests/providers/amazon/aws/triggers/test_redshift_cluster.py::TestRedshiftClusterTrigger::test_redshift_cluster_sensor_trigger_exception
-- tests/providers/amazon/aws/triggers/test_redshift_cluster.py::TestRedshiftClusterTrigger::test_redshift_cluster_sensor_trigger_resuming_status
-- tests/providers/amazon/aws/triggers/test_redshift_cluster.py::TestRedshiftClusterTrigger::test_redshift_cluster_sensor_trigger_success
-- tests/providers/google/common/auth_backend/test_google_openid.py::TestGoogleOpenID::test_success
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_api_resource_configs
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_api_resource_configs_duplication_warning
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_cancel_queries
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_cancel_query_cancel_completed
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_cancel_query_cancel_timeout
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_cancel_query_jobs_to_cancel
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_get_dataset_tables_list
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_invalid_schema_update_and_write_disposition
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_invalid_schema_update_options
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_invalid_source_format
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_extract
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_load_with_non_csv_as_src_fmt
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_schema_update_options
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_schema_update_options_incorrect
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_sql_dialect
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_sql_dialect_default
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_sql_dialect_legacy_with_query_params
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_sql_dialect_legacy_with_query_params_fails
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_with_arg
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_query_without_sql_fails
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookMethods::test_run_table_delete
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryHookRunWithConfiguration::test_run_with_configuration_location
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryWithKMS::test_create_external_table_with_kms
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryWithKMS::test_run_copy_with_kms
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryWithKMS::test_run_load_with_kms
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryWithKMS::test_run_query_with_kms
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryWithLabelsAndDescription::test_create_external_table_description
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryWithLabelsAndDescription::test_create_external_table_labels
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryWithLabelsAndDescription::test_run_load_description
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestBigQueryWithLabelsAndDescription::test_run_load_labels
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestClusteringInRunJob::test_run_load_default
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestClusteringInRunJob::test_run_load_with_arg
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestClusteringInRunJob::test_run_query_default
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestClusteringInRunJob::test_run_query_with_arg
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestDatasetsOperations::test_patch_dataset
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestTableOperations::test_patch_table
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestTimePartitioningInRunJob::test_run_load_default
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestTimePartitioningInRunJob::test_run_load_with_arg
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestTimePartitioningInRunJob::test_run_query_with_arg
-- tests/providers/google/cloud/hooks/test_bigquery.py::TestTimePartitioningInRunJob::test_run_with_auto_detect
-- tests/providers/google/cloud/hooks/test_gcs.py::TestGCSHook::test_list__error_match_glob_and_invalid_delimiter
-- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithDefaultProjectIdFromConnection::test_error_operation
-- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithDefaultProjectIdFromConnection::test_life_science_client_creation
-- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithDefaultProjectIdFromConnection::test_run_pipeline_immediately_complete
-- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithDefaultProjectIdFromConnection::test_waiting_operation
-- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_delegate_to_runtime_error
-- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_error_operation
-- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_life_science_client_creation
-- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_location_path
-- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_run_pipeline_immediately_complete
-- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithPassedProjectId::test_waiting_operation
-- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithoutProjectId::test_life_science_client_creation
-- tests/providers/google/cloud/hooks/test_life_sciences.py::TestLifeSciencesHookWithoutProjectId::test_run_pipeline
-- tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithDefaultProjectIdHook::test_cancel_pipeline_job
-- tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithDefaultProjectIdHook::test_create_pipeline_job
-- tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithDefaultProjectIdHook::test_delete_pipeline_job
-- tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithDefaultProjectIdHook::test_get_pipeline_job
-- tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithDefaultProjectIdHook::test_list_pipeline_jobs
-- tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithoutDefaultProjectIdHook::test_cancel_pipeline_job
-- tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithoutDefaultProjectIdHook::test_create_pipeline_job
-- tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithoutDefaultProjectIdHook::test_delete_pipeline_job
-- tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithoutDefaultProjectIdHook::test_get_pipeline_job
-- tests/providers/google/cloud/hooks/vertex_ai/test_custom_job.py::TestCustomJobWithoutDefaultProjectIdHook::test_list_pipeline_jobs
-- tests/providers/google/cloud/operators/test_dataproc.py::TestDataProcHadoopOperator::test_execute
-- tests/providers/google/cloud/operators/test_dataproc.py::TestDataProcHiveOperator::test_builder
-- tests/providers/google/cloud/operators/test_dataproc.py::TestDataProcHiveOperator::test_execute
-- tests/providers/google/cloud/operators/test_dataproc.py::TestDataProcPigOperator::test_builder
-- tests/providers/google/cloud/operators/test_dataproc.py::TestDataProcPigOperator::test_execute
-- tests/providers/google/cloud/operators/test_dataproc.py::TestDataProcPySparkOperator::test_execute
-- tests/providers/google/cloud/operators/test_dataproc.py::TestDataProcSparkOperator::test_execute
-- tests/providers/google/cloud/operators/test_dataproc.py::TestDataProcSparkSqlOperator::test_builder
-- tests/providers/google/cloud/operators/test_dataproc.py::TestDataProcSparkSqlOperator::test_execute
-- tests/providers/google/cloud/operators/test_dataproc.py::TestDataProcSparkSqlOperator::test_execute_override_project_id
-- tests/providers/google/cloud/operators/test_dataproc.py::TestDataprocClusterScaleOperator::test_execute
-- tests/providers/google/cloud/operators/test_dataproc.py::test_create_cluster_operator_extra_links
-- tests/providers/google/cloud/operators/test_dataproc.py::test_scale_cluster_operator_extra_links
-- tests/providers/google/cloud/operators/test_dataproc.py::test_submit_spark_job_operator_extra_links
-- tests/providers/google/cloud/operators/test_gcs.py::TestGoogleCloudStorageListOperator::test_execute__delimiter
-- tests/providers/google/cloud/operators/test_kubernetes_engine.py::TestGoogleCloudPlatformContainerOperator::test_create_execute_error_body
-- tests/providers/google/cloud/operators/test_life_sciences.py::TestLifeSciencesRunPipelineOperator::test_executes
-- tests/providers/google/cloud/operators/test_life_sciences.py::TestLifeSciencesRunPipelineOperator::test_executes_without_project_id
-- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_copy_files_into_a_folder
-- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_last_modified_time
-- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_more_than_1_wildcard
-- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_no_prefix
-- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_no_suffix
-- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_prefix_and_suffix
-- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_empty_destination_object
-- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_with_destination_object
-- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_with_destination_object_retained_prefix
-- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_with_replace_flag_false
-- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_with_replace_flag_false_with_destination_object
-- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_execute_wildcard_without_destination_object
-- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_executes_with_a_delimiter
-- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_executes_with_delimiter_and_destination_object
-- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_executes_with_different_delimiter_and_destination_object
-- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_wc_with_last_modified_time_with_all_true_cond
-- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_wc_with_last_modified_time_with_one_true_cond
-- tests/providers/google/cloud/transfers/test_gcs_to_gcs.py::TestGoogleCloudStorageToCloudStorageOperator::test_wc_with_no_last_modified_time
diff --git a/tests/executors/test_base_executor.py b/tests/executors/test_base_executor.py
index 04a70c5e83689..da7422737ac4a 100644
--- a/tests/executors/test_base_executor.py
+++ b/tests/executors/test_base_executor.py
@@ -145,7 +145,7 @@ def test_gauge_executor_metrics_single_executor(mock_stats_gauge, mock_trigger_t
 @mock.patch("airflow.executors.sequential_executor.SequentialExecutor.sync")
 @mock.patch("airflow.executors.base_executor.BaseExecutor.trigger_tasks")
 @mock.patch("airflow.executors.base_executor.Stats.gauge")
-@mock.patch("airflow.executors.executor_loader.ExecutorLoader.get_executor_names")
+@mock.patch("airflow.executors.base_executor.ExecutorLoader.get_executor_names")
 def test_gauge_executor_metrics_with_multiple_executors(
     mock_get_executor_names,
     mock_stats_gauge,
diff --git a/tests/executors/test_executor_loader.py b/tests/executors/test_executor_loader.py
index 2192487a01cf8..d9bf81dd3116d 100644
--- a/tests/executors/test_executor_loader.py
+++ b/tests/executors/test_executor_loader.py
@@ -29,7 +29,8 @@
 from airflow.executors.local_executor import LocalExecutor
 from airflow.providers.amazon.aws.executors.ecs.ecs_executor import AwsEcsExecutor
 from airflow.providers.celery.executors.celery_executor import CeleryExecutor
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = pytest.mark.skip_if_database_isolation_mode
diff --git a/tests/integration/cli/commands/test_celery_command.py b/tests/integration/cli/commands/test_celery_command.py
index 61866184337d0..186addedb1164 100644
--- a/tests/integration/cli/commands/test_celery_command.py
+++ b/tests/integration/cli/commands/test_celery_command.py
@@ -25,7 +25,8 @@
 from airflow.cli import cli_parser
 from airflow.cli.commands import celery_command
 from airflow.executors import executor_loader
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 
 @pytest.mark.integration("celery")
diff --git a/tests/integration/executors/test_celery_executor.py b/tests/integration/executors/test_celery_executor.py
index 4ec1cc458c3a5..169d3e91356c9 100644
--- a/tests/integration/executors/test_celery_executor.py
+++ b/tests/integration/executors/test_celery_executor.py
@@ -45,7 +45,8 @@
 from airflow.models.taskinstancekey import TaskInstanceKey
 from airflow.providers.standard.operators.bash import BashOperator
 from airflow.utils.state import State, TaskInstanceState
-from tests.test_utils import db
+
+from dev.tests_common.test_utils import db
 
 logger = logging.getLogger(__name__)
diff --git a/tests/integration/security/test_kerberos.py b/tests/integration/security/test_kerberos.py
index 033b455b56ea7..d16b8bc332c7d 100644
--- a/tests/integration/security/test_kerberos.py
+++ b/tests/integration/security/test_kerberos.py
@@ -26,7 +26,8 @@
 
 from airflow.security import kerberos
 from airflow.security.kerberos import renew_from_kt
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 
 @pytest.mark.integration("kerberos")
diff --git a/tests/jobs/test_backfill_job.py b/tests/jobs/test_backfill_job.py
index 2a5a6689a98f1..f1bb6f17d05c4 100644
--- a/tests/jobs/test_backfill_job.py
+++ b/tests/jobs/test_backfill_job.py
@@ -59,16 +59,17 @@
 from airflow.utils.types import DagRunType
 from tests.listeners import dag_listener
 from tests.models import TEST_DAGS_FOLDER
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.config import conf_vars
-from tests.test_utils.db import (
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.db import (
     clear_db_dags,
     clear_db_pools,
     clear_db_runs,
     clear_db_xcom,
     set_default_pool_slots,
 )
-from tests.test_utils.mock_executor import MockExecutor
+from dev.tests_common.test_utils.mock_executor import MockExecutor
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/jobs/test_base_job.py b/tests/jobs/test_base_job.py
index e9c9fe94ce737..20079f4f235e5 100644
--- a/tests/jobs/test_base_job.py
+++ b/tests/jobs/test_base_job.py
@@ -33,9 +33,10 @@
 from airflow.utils.session import create_session
 from airflow.utils.state import State
 from tests.listeners import lifecycle_listener
-from tests.test_utils.config import conf_vars
 from tests.utils.test_helpers import MockJobRunner, SchedulerJobRunner, TriggererJobRunner
 
+from dev.tests_common.test_utils.config import conf_vars
+
 if TYPE_CHECKING:
     from airflow.serialization.pydantic.job import JobPydantic
diff --git a/tests/jobs/test_local_task_job.py b/tests/jobs/test_local_task_job.py
index 1a0e186264a72..577fddd84d2b7 100644
--- a/tests/jobs/test_local_task_job.py
+++ b/tests/jobs/test_local_task_job.py
@@ -52,11 +52,12 @@
 from airflow.utils.state import State
 from airflow.utils.timeout import timeout
 from airflow.utils.types import DagRunType
-from tests.test_utils import db
-from tests.test_utils.asserts import assert_queries_count
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.config import conf_vars
-from tests.test_utils.mock_executor import MockExecutor
+
+from dev.tests_common.test_utils import db
+from dev.tests_common.test_utils.asserts import assert_queries_count
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.mock_executor import MockExecutor
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py
index bc2be0a12c743..a5748ebaeef9c 100644
--- a/tests/jobs/test_scheduler_job.py
+++ b/tests/jobs/test_scheduler_job.py
@@ -72,10 +72,12 @@
 from tests.listeners import dag_listener
 from tests.listeners.test_listeners import get_listener_manager
 from tests.models import TEST_DAGS_FOLDER
-from tests.test_utils.asserts import assert_queries_count
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.config import conf_vars, env_vars
-from tests.test_utils.db import (
+from tests.utils.test_timezone import UTC
+
+from dev.tests_common.test_utils.asserts import assert_queries_count
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.config import conf_vars, env_vars
+from dev.tests_common.test_utils.db import (
     clear_db_assets,
     clear_db_backfills,
     clear_db_dags,
@@ -87,9 +89,8 @@
     clear_db_sla_miss,
     set_default_pool_slots,
 )
-from tests.test_utils.mock_executor import MockExecutor
-from tests.test_utils.mock_operators import CustomOperator
-from tests.utils.test_timezone import UTC
+from dev.tests_common.test_utils.mock_executor import MockExecutor
+from dev.tests_common.test_utils.mock_operators import CustomOperator
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/jobs/test_triggerer_job.py b/tests/jobs/test_triggerer_job.py
index 84b422342e4ce..da8405e197317 100644
--- a/tests/jobs/test_triggerer_job.py
+++ b/tests/jobs/test_triggerer_job.py
@@ -47,7 +47,8 @@
 from airflow.utils.state import State, TaskInstanceState
 from airflow.utils.types import DagRunType
 from tests.core.test_logging_config import reset_logging
-from tests.test_utils.db import clear_db_dags, clear_db_runs
+
+from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/jobs/test_triggerer_job_logging.py b/tests/jobs/test_triggerer_job_logging.py
index a039c43fb48ca..4f39047f050ee 100644
--- a/tests/jobs/test_triggerer_job_logging.py
+++ b/tests/jobs/test_triggerer_job_logging.py
@@ -30,7 +30,8 @@
 from airflow.utils.log.file_task_handler import FileTaskHandler
 from airflow.utils.log.logging_mixin import RedirectStdHandler
 from airflow.utils.log.trigger_handler import DropTriggerLogsFilter, TriggererHandlerWrapper
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 
 def non_pytest_handlers(val):
diff --git a/tests/lineage/test_hook.py b/tests/lineage/test_hook.py
index c076b19aecedd..3fbbfaa021ffa 100644
--- a/tests/lineage/test_hook.py
+++ b/tests/lineage/test_hook.py
@@ -33,7 +33,8 @@
     NoOpCollector,
     get_hook_lineage_collector,
 )
-from tests.test_utils.mock_plugins import mock_plugin_manager
+
+from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager
 
 
 class TestHookLineageCollector:
diff --git a/tests/lineage/test_lineage.py b/tests/lineage/test_lineage.py
index c99c97ceb4e0c..ac42b2ee7f814 100644
--- a/tests/lineage/test_lineage.py
+++ b/tests/lineage/test_lineage.py
@@ -30,7 +30,8 @@
 from airflow.utils import timezone
 from airflow.utils.context import Context
 from airflow.utils.types import DagRunType
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/listeners/class_listener.py b/tests/listeners/class_listener.py
index ececa853213a5..2b9cefa178d7d 100644
--- a/tests/listeners/class_listener.py
+++ b/tests/listeners/class_listener.py
@@ -19,7 +19,8 @@
 
 from airflow.listeners import hookimpl
 from airflow.utils.state import DagRunState, TaskInstanceState
-from tests.test_utils.compat import AIRFLOW_V_2_10_PLUS
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS
 
 
 if AIRFLOW_V_2_10_PLUS:
diff --git a/tests/listeners/test_dag_import_error_listener.py b/tests/listeners/test_dag_import_error_listener.py
index 57fd4f79dd526..886044d27b44e 100644
--- a/tests/listeners/test_dag_import_error_listener.py
+++ b/tests/listeners/test_dag_import_error_listener.py
@@ -32,8 +32,9 @@
 from airflow.models.errors import ParseImportError
 from airflow.utils import timezone
 from tests.listeners import dag_import_error_listener
-from tests.test_utils.config import conf_vars, env_vars
-from tests.test_utils.db import (
+
+from dev.tests_common.test_utils.config import conf_vars, env_vars
+from dev.tests_common.test_utils.db import (
     clear_db_dags,
     clear_db_import_errors,
     clear_db_jobs,
@@ -42,7 +43,7 @@
     clear_db_runs,
     clear_db_serialized_dags,
     clear_db_sla_miss,
 )
-from tests.test_utils.mock_executor import MockExecutor
+from dev.tests_common.test_utils.mock_executor import MockExecutor
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/models/test_backfill.py b/tests/models/test_backfill.py
index c45625db335de..0f471fbd56546 100644
--- a/tests/models/test_backfill.py
+++ b/tests/models/test_backfill.py
@@ -33,7 +33,13 @@
 )
 from airflow.operators.python import PythonOperator
 from airflow.utils.state import DagRunState
-from tests.test_utils.db import clear_db_backfills, clear_db_dags, clear_db_runs, clear_db_serialized_dags
+
+from dev.tests_common.test_utils.db import (
+    clear_db_backfills,
+    clear_db_dags,
+    clear_db_runs,
+    clear_db_serialized_dags,
+)
 
 pytestmark = [pytest.mark.db_test, pytest.mark.need_serialized_dag]
diff --git a/tests/models/test_base.py b/tests/models/test_base.py
index 3224616404d6a..27eeba2912f52 100644
--- a/tests/models/test_base.py
+++ b/tests/models/test_base.py
@@ -19,7 +19,8 @@
 import pytest
 
 from airflow.models.base import get_id_collation_args
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode]
diff --git a/tests/models/test_baseoperator.py b/tests/models/test_baseoperator.py
index 3c5b7634d5a99..999529e14a997 100644
--- a/tests/models/test_baseoperator.py
+++ b/tests/models/test_baseoperator.py
@@ -50,7 +50,8 @@
 from airflow.utils.trigger_rule import TriggerRule
 from airflow.utils.types import DagRunType
 from tests.models import DEFAULT_DATE
-from tests.test_utils.mock_operators import DeprecatedOperator, MockOperator
+
+from dev.tests_common.test_utils.mock_operators import DeprecatedOperator, MockOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
diff --git a/tests/models/test_cleartasks.py b/tests/models/test_cleartasks.py
index 380c3c28c5143..810453053dd0c 100644
--- a/tests/models/test_cleartasks.py
+++ b/tests/models/test_cleartasks.py
@@ -35,8 +35,9 @@
 from airflow.utils.state import DagRunState, State, TaskInstanceState
 from airflow.utils.types import DagRunType
 from tests.models import DEFAULT_DATE
-from tests.test_utils import db
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
+
+from dev.tests_common.test_utils import db
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/models/test_dag.py b/tests/models/test_dag.py
index c79ca24e03a25..997ef06329fde 100644
--- a/tests/models/test_dag.py
+++ b/tests/models/test_dag.py
@@ -102,13 +102,19 @@
     StaticTestPriorityWeightStrategy,
     TestPriorityWeightStrategyPlugin,
 )
-from tests.test_utils.asserts import assert_queries_count
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.config import conf_vars
-from tests.test_utils.db import clear_db_assets, clear_db_dags, clear_db_runs, clear_db_serialized_dags
-from tests.test_utils.mapping import expand_mapped_task
-from tests.test_utils.mock_plugins import mock_plugin_manager
-from tests.test_utils.timetables import cron_timetable, delta_timetable
+
+from dev.tests_common.test_utils.asserts import assert_queries_count
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.db import (
+    clear_db_assets,
+    clear_db_dags,
+    clear_db_runs,
+    clear_db_serialized_dags,
+)
+from dev.tests_common.test_utils.mapping import expand_mapped_task
+from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager
+from dev.tests_common.test_utils.timetables import cron_timetable, delta_timetable
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
@@ -2296,14 +2302,14 @@ def test_next_dagrun_info_on_29_feb(self):
     )
     def test_access_control_format(self, fab_version, perms, expected_exception, expected_perms):
         if expected_exception:
-            with patch("airflow.models.dag.FAB_VERSION", fab_version):
+            with patch("airflow.providers.fab.__version__", fab_version):
                 with pytest.raises(
                     expected_exception,
                     match="Please upgrade the FAB provider to a version >= 1.3.0 to allow use the Dag Level Access Control new format.",
                 ):
                     DAG(dag_id="dag_test", schedule=None, access_control=perms)
         else:
-            with patch("airflow.models.dag.FAB_VERSION", fab_version):
+            with patch("airflow.providers.fab.__version__", fab_version):
                 dag = DAG(dag_id="dag_test", schedule=None, access_control=perms)
 
         assert dag.access_control == expected_perms
diff --git a/tests/models/test_dagbag.py b/tests/models/test_dagbag.py
index 0179fa865291d..0b477cc24c636 100644
--- a/tests/models/test_dagbag.py
+++ b/tests/models/test_dagbag.py
@@ -46,9 +46,10 @@
 from airflow.www.security_appless import ApplessAirflowSecurityManager
 from tests import cluster_policies
 from tests.models import TEST_DAGS_FOLDER
-from tests.test_utils import db
-from tests.test_utils.asserts import assert_queries_count
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils import db
+from dev.tests_common.test_utils.asserts import assert_queries_count
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/models/test_dagcode.py b/tests/models/test_dagcode.py
index e566e9ceed080..0bc105cfaf42c 100644
--- a/tests/models/test_dagcode.py
+++ b/tests/models/test_dagcode.py
@@ -30,7 +30,8 @@
 # To move it to a shared module.
 from airflow.utils.file import open_maybe_zipped
 from airflow.utils.session import create_session
-from tests.test_utils.db import clear_db_dag_code
+
+from dev.tests_common.test_utils.db import clear_db_dag_code
 
 pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode]
diff --git a/tests/models/test_dagrun.py b/tests/models/test_dagrun.py
index dac2982b0ba34..46773232f77fe 100644
--- a/tests/models/test_dagrun.py
+++ b/tests/models/test_dagrun.py
@@ -47,10 +47,11 @@
 from airflow.utils.trigger_rule import TriggerRule
 from airflow.utils.types import DagRunType
 from tests.models import DEFAULT_DATE as _DEFAULT_DATE
-from tests.test_utils import db
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.config import conf_vars
-from tests.test_utils.mock_operators import MockOperator
+
+from dev.tests_common.test_utils import db
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.mock_operators import MockOperator
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/models/test_dagwarning.py b/tests/models/test_dagwarning.py
index d3ef85d351a3f..9b98e0c31d74d 100644
--- a/tests/models/test_dagwarning.py
+++ b/tests/models/test_dagwarning.py
@@ -25,7 +25,8 @@
 
 from airflow.models import DagModel
 from airflow.models.dagwarning import DagWarning
-from tests.test_utils.db import clear_db_dags
+
+from dev.tests_common.test_utils.db import clear_db_dags
 
 pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode]
diff --git a/tests/models/test_mappedoperator.py b/tests/models/test_mappedoperator.py
index 0571e07e671f8..927dbfd73a563 100644
--- a/tests/models/test_mappedoperator.py
+++ b/tests/models/test_mappedoperator.py
@@ -42,8 +42,13 @@
 from airflow.utils.trigger_rule import TriggerRule
 from airflow.utils.xcom import XCOM_RETURN_KEY
 from tests.models import DEFAULT_DATE
-from tests.test_utils.mapping import expand_mapped_task
-from tests.test_utils.mock_operators import MockOperator, MockOperatorWithNestedFields, NestedFields
+
+from dev.tests_common.test_utils.mapping import expand_mapped_task
+from dev.tests_common.test_utils.mock_operators import (
+    MockOperator,
+    MockOperatorWithNestedFields,
+    NestedFields,
+)
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/models/test_param.py b/tests/models/test_param.py
index 3d85a957ec5d9..2d324478deefe 100644
--- a/tests/models/test_param.py
+++ b/tests/models/test_param.py
@@ -26,7 +26,8 @@
 from airflow.serialization.serialized_objects import BaseSerialization
 from airflow.utils import timezone
 from airflow.utils.types import DagRunType
-from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_xcom
+
+from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_xcom
 
 
 class TestParam:
diff --git a/tests/models/test_pool.py b/tests/models/test_pool.py
index 3bd3b4f5ffb0f..22852261671ce 100644
--- a/tests/models/test_pool.py
+++ b/tests/models/test_pool.py
@@ -27,7 +27,13 @@
 from airflow.utils import timezone
 from airflow.utils.session import create_session
 from airflow.utils.state import State
-from tests.test_utils.db import clear_db_dags, clear_db_pools, clear_db_runs, set_default_pool_slots
+
+from dev.tests_common.test_utils.db import (
+    clear_db_dags,
+    clear_db_pools,
+    clear_db_runs,
+    set_default_pool_slots,
+)
 
 pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode]
diff --git a/tests/models/test_renderedtifields.py b/tests/models/test_renderedtifields.py
index 1de83954ee4bd..ea22d31871db9 100644
--- a/tests/models/test_renderedtifields.py
+++ b/tests/models/test_renderedtifields.py
@@ -34,8 +34,9 @@
 from airflow.providers.standard.operators.bash import BashOperator
 from airflow.utils.task_instance_session import set_current_task_instance_session
 from airflow.utils.timezone import datetime
-from tests.test_utils.asserts import assert_queries_count
-from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_rendered_ti_fields
+
+from dev.tests_common.test_utils.asserts import assert_queries_count
+from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_rendered_ti_fields
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/models/test_serialized_dag.py b/tests/models/test_serialized_dag.py
index 93845e95832cf..5e6714feda373 100644
--- a/tests/models/test_serialized_dag.py
+++ b/tests/models/test_serialized_dag.py
@@ -36,8 +36,9 @@
 from airflow.settings import json
 from airflow.utils.hashlib_wrapper import md5
 from airflow.utils.session import create_session
-from tests.test_utils import db
-from tests.test_utils.asserts import assert_queries_count
+
+from dev.tests_common.test_utils import db
+from dev.tests_common.test_utils.asserts import assert_queries_count
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/models/test_skipmixin.py b/tests/models/test_skipmixin.py
index c24bfb7c86444..0f2406c5737c5 100644
--- a/tests/models/test_skipmixin.py
+++ b/tests/models/test_skipmixin.py
@@ -31,7 +31,8 @@
 from airflow.utils import timezone
 from airflow.utils.state import State
 from airflow.utils.types import DagRunType
-from tests.test_utils.db import clear_db_dags, clear_db_runs
+
+from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/models/test_taskinstance.py b/tests/models/test_taskinstance.py
index c09d3575d1eca..03566dca30bc9 100644
--- a/tests/models/test_taskinstance.py
+++ b/tests/models/test_taskinstance.py
@@ -99,11 +99,12 @@
 from airflow.utils.types import DagRunType
 from airflow.utils.xcom import XCOM_RETURN_KEY
 from tests.models import DEFAULT_DATE, TEST_DAGS_FOLDER
-from tests.test_utils import db
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.config import conf_vars
-from tests.test_utils.db import clear_db_connections, clear_db_runs
-from tests.test_utils.mock_operators import MockOperator
+
+from dev.tests_common.test_utils import db
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.db import clear_db_connections, clear_db_runs
+from dev.tests_common.test_utils.mock_operators import MockOperator
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/models/test_timestamp.py b/tests/models/test_timestamp.py
index a2d4a92f16660..4888c2fdac268 100644
--- a/tests/models/test_timestamp.py
+++ b/tests/models/test_timestamp.py
@@ -25,7 +25,8 @@
 from airflow.utils import timezone
 from airflow.utils.session import provide_session
 from airflow.utils.state import State
-from tests.test_utils.db import clear_db_dags, clear_db_logs, clear_db_runs
+
+from dev.tests_common.test_utils.db import clear_db_dags, clear_db_logs, clear_db_runs
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/models/test_trigger.py b/tests/models/test_trigger.py
index 407d6edd753a8..b44e4b9f6e881 100644
--- a/tests/models/test_trigger.py
+++ b/tests/models/test_trigger.py
@@ -42,7 +42,8 @@
 from airflow.utils.session import create_session
 from airflow.utils.state import State
 from airflow.utils.xcom import XCOM_RETURN_KEY
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/models/test_variable.py b/tests/models/test_variable.py
index e9509b4569db4..dfe448092c842 100644
--- a/tests/models/test_variable.py
+++ b/tests/models/test_variable.py
@@ -27,8 +27,9 @@
 from airflow.models import Variable, crypto, variable
 from airflow.secrets.cache import SecretCache
 from airflow.secrets.metastore import MetastoreBackend
-from tests.test_utils import db
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils import db
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/models/test_xcom.py b/tests/models/test_xcom.py
index 07533ec944be8..17ea5fa4ff114 100644
--- a/tests/models/test_xcom.py
+++ b/tests/models/test_xcom.py
@@ -35,8 +35,9 @@
 from airflow.utils import timezone
 from airflow.utils.session import create_session
 from airflow.utils.xcom import XCOM_RETURN_KEY
-from tests.test_utils.config import conf_vars
-from tests.www.test_utils import is_db_isolation_mode
+
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.db import is_db_isolation_mode
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/models/test_xcom_arg.py b/tests/models/test_xcom_arg.py
index fcc2e546009c9..b161020d1fb91 100644
--- a/tests/models/test_xcom_arg.py
+++ b/tests/models/test_xcom_arg.py
@@ -22,8 +22,9 @@
 from airflow.operators.python import PythonOperator
 from airflow.providers.standard.operators.bash import BashOperator
 from airflow.utils.types import NOTSET
-from tests.test_utils.config import conf_vars
-from tests.test_utils.db import clear_db_dags, clear_db_runs
+
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/operators/test_branch_operator.py b/tests/operators/test_branch_operator.py
index 377473e0cc385..8943bf580e50a 100644
--- a/tests/operators/test_branch_operator.py
+++ b/tests/operators/test_branch_operator.py
@@ -28,7 +28,8 @@
 from airflow.utils.state import State
 from airflow.utils.task_group import TaskGroup
 from airflow.utils.types import DagRunType
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/operators/test_email.py b/tests/operators/test_email.py
index 04c4cf7fd8f69..c86e0f94f6145 100644
--- a/tests/operators/test_email.py
+++ b/tests/operators/test_email.py
@@ -24,7 +24,8 @@
 
 from airflow.operators.email import EmailOperator
 from airflow.utils import timezone
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/operators/test_generic_transfer.py b/tests/operators/test_generic_transfer.py
index c877d7bed99cd..e1281ed4b2476 100644
--- a/tests/operators/test_generic_transfer.py
+++ b/tests/operators/test_generic_transfer.py
@@ -60,7 +60,7 @@ def teardown_method(self):
         ],
     )
     def test_mysql_to_mysql(self, client):
-        from tests.providers.mysql.hooks.test_mysql import MySqlContext
+        from providers.tests.mysql.hooks.test_mysql import MySqlContext
 
         with MySqlContext(client):
             sql = "SELECT * FROM connection;"
diff --git a/tests/operators/test_latest_only_operator.py b/tests/operators/test_latest_only_operator.py
index 58a43ae7e66b6..78fabc4ca922f 100644
--- a/tests/operators/test_latest_only_operator.py
+++ b/tests/operators/test_latest_only_operator.py
@@ -30,8 +30,9 @@
 from airflow.utils.state import State
 from airflow.utils.trigger_rule import TriggerRule
 from airflow.utils.types import DagRunType
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.db import clear_db_runs, clear_db_xcom
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.db import clear_db_runs, clear_db_xcom
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/operators/test_python.py b/tests/operators/test_python.py
index c98dc7018a4a1..3dd8f506ee986 100644
--- a/tests/operators/test_python.py
+++ b/tests/operators/test_python.py
@@ -68,9 +68,10 @@
 from airflow.utils.state import DagRunState, State, TaskInstanceState
 from airflow.utils.trigger_rule import TriggerRule
 from airflow.utils.types import NOTSET, DagRunType
-from tests.test_utils import AIRFLOW_MAIN_FOLDER
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.db import clear_db_runs
+
+from dev.tests_common.test_utils import AIRFLOW_MAIN_FOLDER
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.db import clear_db_runs
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/plugins/test_plugin.py b/tests/plugins/test_plugin.py
index 995b14e23e751..fd474766d41ea 100644
--- a/tests/plugins/test_plugin.py
+++ b/tests/plugins/test_plugin.py
@@ -35,7 +35,8 @@
 from airflow.timetables.interval import CronDataIntervalTimetable
 from tests.listeners import empty_listener
 from tests.listeners.class_listener import ClassBasedListener
-from tests.test_utils.mock_operators import (
+
+from dev.tests_common.test_utils.mock_operators import (
     AirflowLink,
     AirflowLink2,
     CustomBaseIndexOpLink,
diff --git a/tests/plugins/test_plugins_manager.py b/tests/plugins/test_plugins_manager.py
index 7e4bedbfb8c1b..1c54c3ebb195c 100644
--- a/tests/plugins/test_plugins_manager.py
+++ b/tests/plugins/test_plugins_manager.py
@@ -33,8 +33,9 @@
 from airflow.plugins_manager import AirflowPlugin
 from airflow.utils.module_loading import qualname
 from airflow.www import app as application
-from tests.test_utils.config import conf_vars
-from tests.test_utils.mock_plugins import mock_plugin_manager
+
+from dev.tests_common.test_utils.config import conf_vars
+from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/secrets/test_cache.py b/tests/secrets/test_cache.py
index 8d45080868691..40ab4aa290df5 100644
--- a/tests/secrets/test_cache.py
+++ b/tests/secrets/test_cache.py
@@ -22,7 +22,8 @@
 import pytest
 
 from airflow.secrets.cache import SecretCache
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 
 def test_cache_disabled_by_default():
diff --git a/tests/security/test_kerberos.py b/tests/security/test_kerberos.py
index b424edb820891..2b661fe9a1478 100644
--- a/tests/security/test_kerberos.py
+++ b/tests/security/test_kerberos.py
@@ -25,7 +25,8 @@
 
 from airflow.security import kerberos
 from airflow.security.kerberos import get_kerberos_principle, renew_from_kt
-from tests.test_utils.config import conf_vars
+
+from dev.tests_common.test_utils.config import conf_vars
 
 pytestmark = pytest.mark.db_test
diff --git a/tests/sensors/test_base.py b/tests/sensors/test_base.py
index 43c8fa5a64bd8..8a3b2b5c5c0fa 100644
--- a/tests/sensors/test_base.py
+++ b/tests/sensors/test_base.py
@@ -57,7 +57,8 @@
 from airflow.utils.session import create_session
 from airflow.utils.state import State
 from airflow.utils.timezone import datetime
-from tests.test_utils import db
+
+from dev.tests_common.test_utils import db
 
 pytestmark = pytest.mark.db_test
 
@@ -68,7 +69,7 @@
 TEST_DAG_ID = "unit_test_dag"
 DUMMY_OP = "dummy_op"
 SENSOR_OP = "sensor_op"
-DEV_NULL = "dev/null"
+DEV_NULL = "/dev/null"
 
 
 @pytest.fixture
diff --git a/tests/sensors/test_external_task_sensor.py b/tests/sensors/test_external_task_sensor.py
index 9947a197a0335..f1739e6b84418 100644
--- a/tests/sensors/test_external_task_sensor.py
+++ b/tests/sensors/test_external_task_sensor.py
@@ -52,9 +52,10 @@
 from airflow.utils.timezone import datetime
 from airflow.utils.types import DagRunType
 from tests.models import TEST_DAGS_FOLDER
-from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from tests.test_utils.db import clear_db_runs
-from tests.test_utils.mock_operators import MockOperator
+
+from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from dev.tests_common.test_utils.db import clear_db_runs
+from dev.tests_common.test_utils.mock_operators import MockOperator
 
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/serialization/test_dag_serialization.py b/tests/serialization/test_dag_serialization.py
index f0f517042314a..35f5e196a2222 100644
--- a/tests/serialization/test_dag_serialization.py
+++ b/tests/serialization/test_dag_serialization.py
@@ -80,9 +80,14 @@
 from airflow.utils.operator_resources import Resources
 from airflow.utils.task_group import TaskGroup
 from airflow.utils.xcom import XCOM_RETURN_KEY
-from tests.test_utils.compat import BaseOperatorLink
-from tests.test_utils.mock_operators import AirflowLink2, CustomOperator, GoogleLink, MockOperator
-from tests.test_utils.timetables import CustomSerializationTimetable, cron_timetable, delta_timetable
+
+from dev.tests_common.test_utils.compat import BaseOperatorLink
+from dev.tests_common.test_utils.mock_operators import AirflowLink2, CustomOperator, GoogleLink, MockOperator
+from dev.tests_common.test_utils.timetables import (
+    CustomSerializationTimetable,
+    cron_timetable,
+    delta_timetable,
+)
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -185,7 +190,9 @@
                 "max_retry_delay": 600.0,
                 "downstream_task_ids": [],
                 "_is_empty": False,
-                "_operator_extra_links": [{"tests.test_utils.mock_operators.CustomOpLink": {}}],
+                "_operator_extra_links": [
+                    {"dev.tests_common.test_utils.mock_operators.CustomOpLink": {}}
+                ],
                 "ui_color": "#fff",
                 "ui_fgcolor": "#000",
                 "template_ext": [],
@@ -193,7 +200,7 @@
                 "template_fields_renderers": {},
                 "_task_type": "CustomOperator",
                 "_operator_name": "@custom",
-                "_task_module": "tests.test_utils.mock_operators",
+                "_task_module": "dev.tests_common.test_utils.mock_operators",
                 "pool": "default_pool",
                 "is_setup": False,
                 "is_teardown": False,
@@ -233,7 +240,7 @@
 )
 
 CUSTOM_TIMETABLE_SERIALIZED = {
-    "__type": "tests.test_utils.timetables.CustomSerializationTimetable",
+    "__type": "dev.tests_common.test_utils.timetables.CustomSerializationTimetable",
     "__var": {"value": "foo"},
 }
 
@@ -310,8 +317,8 @@ def get_excluded_patterns() -> Generator[str, None, None]:
         if python_version in provider_info.get("excluded-python-versions"):
             provider_path = provider.replace(".", "/")
             yield f"airflow/providers/{provider_path}/"
-            yield f"tests/providers/{provider_path}/"
-            yield f"tests/system/providers/{provider_path}/"
+            yield f"providers/tests/{provider_path}/"
+            yield f"providers/tests/system/{provider_path}/"
 
 
 def collect_dags(dag_folder=None):
@@ -328,10 +335,10 @@ def collect_dags(dag_folder=None):
     else:
         patterns = [
             "airflow/example_dags",
-            "airflow/providers/*/example_dags",  # TODO: Remove once AIP-47 is completed
-            "airflow/providers/*/*/example_dags",  # TODO: Remove once AIP-47 is completed
-            "tests/system/providers/*/",
-            "tests/system/providers/*/*/",
+            "providers/src/airflow/providers/*/example_dags",  # TODO: Remove once AIP-47 is completed
+            "providers/src/airflow/providers/*/*/example_dags",  # TODO: Remove once AIP-47 is completed
+            "providers/tests/system/*/",
+            "providers/tests/system/*/*/",
         ]
     excluded_patterns = [f"{ROOT_FOLDER}/{excluded_pattern}" for excluded_pattern in get_excluded_patterns()]
     for pattern in patterns:
@@ -366,7 +373,7 @@ def timetable_plugin(monkeypatch):
     monkeypatch.setattr(
         plugins_manager,
         "timetable_classes",
-        {"tests.test_utils.timetables.CustomSerializationTimetable": CustomSerializationTimetable},
+        {"dev.tests_common.test_utils.timetables.CustomSerializationTimetable": CustomSerializationTimetable},
     )
 
 
@@ -462,7 +469,7 @@ def test_dag_serialization_unregistered_custom_timetable(self):
         message = (
             "Failed to serialize DAG 'simple_dag': Timetable class "
-            "'tests.test_utils.timetables.CustomSerializationTimetable' "
+            "'dev.tests_common.test_utils.timetables.CustomSerializationTimetable' "
             "is not registered or "
             "you have a top level database access that disrupted the session. "
            "Please check the airflow best practices documentation."
@@ -529,8 +536,8 @@ def test_deserialization_across_process(self): def test_roundtrip_provider_example_dags(self): dags = collect_dags( [ - "airflow/providers/*/example_dags", - "airflow/providers/*/*/example_dags", + "src/providers/airflow/providers/*/example_dags", + "src/providers/airflow/providers/*/*/example_dags", ] ) @@ -827,7 +834,7 @@ def test_deserialization_timetable_unregistered(self): SerializedDAG.from_dict(serialized) message = ( "Timetable class " - "'tests.test_utils.timetables.CustomSerializationTimetable' " + "'dev.tests_common.test_utils.timetables.CustomSerializationTimetable' " "is not registered or " "you have a top level database access that disrupted the session. " "Please check the airflow best practices documentation." @@ -979,15 +986,15 @@ def test_task_params_roundtrip(self, val, expected_val): [ pytest.param( "true", - [{"tests.test_utils.mock_operators.CustomOpLink": {}}], + [{"dev.tests_common.test_utils.mock_operators.CustomOpLink": {}}], {"Google Custom": "http://google.com/custom_base_link?search=true"}, id="non-indexed-link", ), pytest.param( ["echo", "true"], [ - {"tests.test_utils.mock_operators.CustomBaseIndexOpLink": {"index": 0}}, - {"tests.test_utils.mock_operators.CustomBaseIndexOpLink": {"index": 1}}, + {"dev.tests_common.test_utils.mock_operators.CustomBaseIndexOpLink": {"index": 0}}, + {"dev.tests_common.test_utils.mock_operators.CustomBaseIndexOpLink": {"index": 1}}, ], { "BigQuery Console #1": "https://console.cloud.google.com/bigquery?j=echo", @@ -1290,7 +1297,7 @@ def test_operator_deserialize_old_names(self): "template_fields": ["bash_command"], "template_fields_renderers": {}, "_task_type": "CustomOperator", - "_task_module": "tests.test_utils.mock_operators", + "_task_module": "dev.tests_common.test_utils.mock_operators", "pool": "default_pool", "ui_color": "#fff", "ui_fgcolor": "#000", @@ -2347,7 +2354,7 @@ def test_operator_expand_xcomarg_serde(): "_is_empty": False, "_is_mapped": True, "_needs_expansion": True, - "_task_module": "tests.test_utils.mock_operators", + "_task_module": "dev.tests_common.test_utils.mock_operators", "_task_type": "MockOperator", "downstream_task_ids": [], "expand_input": { @@ -2402,7 +2409,7 @@ def test_operator_expand_kwargs_literal_serde(strict): "_is_empty": False, "_is_mapped": True, "_needs_expansion": True, - "_task_module": "tests.test_utils.mock_operators", + "_task_module": "dev.tests_common.test_utils.mock_operators", "_task_type": "MockOperator", "downstream_task_ids": [], "expand_input": { @@ -2457,7 +2464,7 @@ def test_operator_expand_kwargs_xcomarg_serde(strict): "_is_empty": False, "_is_mapped": True, "_needs_expansion": True, - "_task_module": "tests.test_utils.mock_operators", + "_task_module": "dev.tests_common.test_utils.mock_operators", "_task_type": "MockOperator", "downstream_task_ids": [], "expand_input": { @@ -2820,7 +2827,7 @@ def operator_extra_links(self): "_disallow_kwargs_override": False, "_expand_input_attr": "expand_input", "downstream_task_ids": [], - "_operator_extra_links": [{"tests.test_utils.mock_operators.AirflowLink2": {}}], + "_operator_extra_links": [{"dev.tests_common.test_utils.mock_operators.AirflowLink2": {}}], "ui_color": "#fff", "ui_fgcolor": "#000", "template_ext": [], diff --git a/tests/serialization/test_pydantic_models.py b/tests/serialization/test_pydantic_models.py index 55c61ea220263..423dc41a38068 100644 --- a/tests/serialization/test_pydantic_models.py +++ b/tests/serialization/test_pydantic_models.py @@ -45,7 +45,8 @@ from airflow.utils.state import 
State from airflow.utils.types import AttributeRemoved, DagRunType from tests.models import DEFAULT_DATE -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/serialization/test_serde.py b/tests/serialization/test_serde.py index a36013d20cfa7..5c915839c6422 100644 --- a/tests/serialization/test_serde.py +++ b/tests/serialization/test_serde.py @@ -43,7 +43,8 @@ serialize, ) from airflow.utils.module_loading import import_string, iter_namespace, qualname -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars @pytest.fixture diff --git a/tests/serialization/test_serialized_objects.py b/tests/serialization/test_serialized_objects.py index 0bc8a67ef879a..67fdceeaf08d1 100644 --- a/tests/serialization/test_serialized_objects.py +++ b/tests/serialization/test_serialized_objects.py @@ -471,7 +471,8 @@ def test_all_pydantic_models_round_trip(): @pytest.mark.db_test def test_serialized_mapped_operator_unmap(dag_maker): from airflow.serialization.serialized_objects import SerializedDAG - from tests.test_utils.mock_operators import MockOperator + + from dev.tests_common.test_utils.mock_operators import MockOperator with dag_maker(dag_id="dag") as dag: MockOperator(task_id="task1", arg1="x") diff --git a/tests/system/core/example_external_task_child_deferrable.py b/tests/system/core/example_external_task_child_deferrable.py index 781ad4ea5ef1f..4e8eca3b15eb2 100644 --- a/tests/system/core/example_external_task_child_deferrable.py +++ b/tests/system/core/example_external_task_child_deferrable.py @@ -34,7 +34,7 @@ ) -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/core/example_external_task_parent_deferrable.py b/tests/system/core/example_external_task_parent_deferrable.py index 8896fdc1b9be4..1a64f7e98ae8a 100644 --- a/tests/system/core/example_external_task_parent_deferrable.py +++ b/tests/system/core/example_external_task_parent_deferrable.py @@ -57,14 +57,14 @@ start >> [trigger_child_task, external_task_sensor] >> end - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run +from dev.tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/example_empty.py b/tests/system/example_empty.py index 143c79fc2f990..de316a11eee77 100644 --- a/tests/system/example_empty.py +++ b/tests/system/example_empty.py @@ -35,14 +35,14 @@ chain(task) - from tests.system.utils.watcher import watcher + from dev.tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from tests.system.utils import get_test_run # noqa: E402 +from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: 
tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/providers/microsoft/azure/__init__.py b/tests/system/providers/microsoft/azure/__init__.py deleted file mode 100644 index 217e5db960782..0000000000000 --- a/tests/system/providers/microsoft/azure/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/tests/system/providers/papermill/__init__.py b/tests/system/providers/papermill/__init__.py deleted file mode 100644 index 217e5db960782..0000000000000 --- a/tests/system/providers/papermill/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
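All of the hunks above follow a single pattern: test helpers that previously lived under the `tests` package are now imported from `dev.tests_common`. A minimal sketch of what a migrated test module looks like, using helper names taken from the hunks above (the test body itself is illustrative only, not part of this series):

```python
from __future__ import annotations

# Old import paths, removed by this series:
#   from tests.test_utils.config import conf_vars
#   from tests.test_utils.db import clear_db_runs
# New import paths under the shared dev package:
from dev.tests_common.test_utils.config import conf_vars
from dev.tests_common.test_utils.db import clear_db_runs


def test_something():
    clear_db_runs()  # reset DagRun tables between tests
    # conf_vars temporarily overrides Airflow config options for the
    # duration of the block; keys are (section, option) tuples.
    with conf_vars({("core", "unit_test_mode"): "True"}):
        ...
```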
diff --git a/tests/task/task_runner/test_standard_task_runner.py b/tests/task/task_runner/test_standard_task_runner.py index 54b8a4d4173e7..55e3d34192d60 100644 --- a/tests/task/task_runner/test_standard_task_runner.py +++ b/tests/task/task_runner/test_standard_task_runner.py @@ -42,8 +42,9 @@ from airflow.utils.timeout import timeout from tests.listeners import xcom_listener from tests.listeners.file_write_listener import FileWriteListener -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.db import clear_db_runs + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import clear_db_runs if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/ti_deps/deps/test_pool_slots_available_dep.py b/tests/ti_deps/deps/test_pool_slots_available_dep.py index c81657bbb6488..d7fc33aa41bf8 100644 --- a/tests/ti_deps/deps/test_pool_slots_available_dep.py +++ b/tests/ti_deps/deps/test_pool_slots_available_dep.py @@ -26,7 +26,8 @@ from airflow.ti_deps.deps.pool_slots_available_dep import PoolSlotsAvailableDep from airflow.utils.session import create_session from airflow.utils.state import TaskInstanceState -from tests.test_utils import db + +from dev.tests_common.test_utils import db pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/ti_deps/deps/test_prev_dagrun_dep.py b/tests/ti_deps/deps/test_prev_dagrun_dep.py index 4354a9237d5f9..638f4b69d7e92 100644 --- a/tests/ti_deps/deps/test_prev_dagrun_dep.py +++ b/tests/ti_deps/deps/test_prev_dagrun_dep.py @@ -29,8 +29,9 @@ from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.timezone import convert_to_utc, datetime from airflow.utils.types import DagRunType -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.db import clear_db_runs + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import clear_db_runs if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/ti_deps/deps/test_ready_to_reschedule_dep.py b/tests/ti_deps/deps/test_ready_to_reschedule_dep.py index 568d6abf025c7..d137d43bbf38d 100644 --- a/tests/ti_deps/deps/test_ready_to_reschedule_dep.py +++ b/tests/ti_deps/deps/test_ready_to_reschedule_dep.py @@ -30,7 +30,8 @@ from airflow.utils import timezone from airflow.utils.session import create_session from airflow.utils.state import State -from tests.test_utils import db + +from dev.tests_common.test_utils import db pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/utils/log/test_log_reader.py b/tests/utils/log/test_log_reader.py index 3216222909b7a..53e702efd8a8e 100644 --- a/tests/utils/log/test_log_reader.py +++ b/tests/utils/log/test_log_reader.py @@ -38,8 +38,9 @@ from airflow.utils.log.logging_mixin import ExternalLoggingMixin from airflow.utils.state import TaskInstanceState from airflow.utils.types import DagRunType -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_runs + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/utils/log/test_secrets_masker.py b/tests/utils/log/test_secrets_masker.py index 5781139677dfd..f3fed73675cce 100644 --- 
a/tests/utils/log/test_secrets_masker.py +++ b/tests/utils/log/test_secrets_masker.py @@ -38,7 +38,8 @@ should_hide_value_for_key, ) from airflow.utils.state import DagRunState, JobState, State, TaskInstanceState -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.enable_redact p = "password" diff --git a/tests/utils/log/test_task_context_logger.py b/tests/utils/log/test_task_context_logger.py index deca4e55484b4..3a08947ede091 100644 --- a/tests/utils/log/test_task_context_logger.py +++ b/tests/utils/log/test_task_context_logger.py @@ -24,8 +24,9 @@ from airflow.models.taskinstancekey import TaskInstanceKey from airflow.utils.log.task_context_logger import TaskContextLogger -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/utils/test_db.py b/tests/utils/test_db.py index ce77f80297fcf..0f3a56a5d1c8d 100644 --- a/tests/utils/test_db.py +++ b/tests/utils/test_db.py @@ -50,7 +50,8 @@ upgradedb, ) from airflow.utils.db_manager import RunDBManager -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/utils/test_db_cleanup.py b/tests/utils/test_db_cleanup.py index 0a8cd9c90c962..3b5a3b2227231 100644 --- a/tests/utils/test_db_cleanup.py +++ b/tests/utils/test_db_cleanup.py @@ -48,7 +48,13 @@ run_cleanup, ) from airflow.utils.session import create_session -from tests.test_utils.db import clear_db_assets, clear_db_dags, clear_db_runs, drop_tables_with_prefix + +from dev.tests_common.test_utils.db import ( + clear_db_assets, + clear_db_dags, + clear_db_runs, + drop_tables_with_prefix, +) pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/utils/test_db_manager.py b/tests/utils/test_db_manager.py index 1c8a6c6c7dfc8..975ea65499aae 100644 --- a/tests/utils/test_db_manager.py +++ b/tests/utils/test_db_manager.py @@ -25,7 +25,8 @@ from airflow.models import Base from airflow.utils.db import downgrade, initdb from airflow.utils.db_manager import BaseDBManager, RunDBManager -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test] diff --git a/tests/utils/test_dot_renderer.py b/tests/utils/test_dot_renderer.py index 0376848fce829..d1521ca6e0f69 100644 --- a/tests/utils/test_dot_renderer.py +++ b/tests/utils/test_dot_renderer.py @@ -29,8 +29,9 @@ from airflow.utils import dot_renderer, timezone from airflow.utils.state import State from airflow.utils.task_group import TaskGroup -from tests.test_utils.compat import BashOperator -from tests.test_utils.db import clear_db_dags + +from dev.tests_common.test_utils.compat import BashOperator +from dev.tests_common.test_utils.db import clear_db_dags START_DATE = timezone.utcnow() diff --git a/tests/utils/test_email.py b/tests/utils/test_email.py index bf5f3fc0a18fb..b47dcbc87585a 100644 --- a/tests/utils/test_email.py +++ b/tests/utils/test_email.py @@ -29,7 +29,8 @@ from airflow.configuration import conf from airflow.utils import email -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import 
conf_vars EMAILS = ["test1@example.com", "test2@example.com"] diff --git a/tests/utils/test_file.py b/tests/utils/test_file.py index 50cc37ca57ce3..2ffb89ec34196 100644 --- a/tests/utils/test_file.py +++ b/tests/utils/test_file.py @@ -27,7 +27,8 @@ from airflow.utils import file as file_utils from airflow.utils.file import correct_maybe_zipped, find_path_from_directory, open_maybe_zipped from tests.models import TEST_DAGS_FOLDER -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars def might_contain_dag(file_path: str, zip_file: zipfile.ZipFile | None = None): diff --git a/tests/utils/test_helpers.py b/tests/utils/test_helpers.py index 478604186e3a4..0b0046b1ba082 100644 --- a/tests/utils/test_helpers.py +++ b/tests/utils/test_helpers.py @@ -37,8 +37,9 @@ validate_key, ) from airflow.utils.types import NOTSET -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_runs + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs if TYPE_CHECKING: from airflow.jobs.job import Job diff --git a/tests/utils/test_log_handlers.py b/tests/utils/test_log_handlers.py index 8de3b03a1af1b..4be3564567ebd 100644 --- a/tests/utils/test_log_handlers.py +++ b/tests/utils/test_log_handlers.py @@ -54,8 +54,9 @@ from airflow.utils.state import State, TaskInstanceState from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/utils/test_net.py b/tests/utils/test_net.py index 50d44cf125b3c..60823911b8b03 100644 --- a/tests/utils/test_net.py +++ b/tests/utils/test_net.py @@ -24,7 +24,8 @@ from airflow.exceptions import AirflowConfigException from airflow.utils import net -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars def get_hostname(): diff --git a/tests/utils/test_serve_logs.py b/tests/utils/test_serve_logs.py index b6756502c10c2..8d41202be2545 100644 --- a/tests/utils/test_serve_logs.py +++ b/tests/utils/test_serve_logs.py @@ -28,7 +28,8 @@ from airflow.utils import timezone from airflow.utils.jwt_signer import JWTSigner from airflow.utils.serve_logs import create_app -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars if TYPE_CHECKING: from flask.testing import FlaskClient diff --git a/tests/utils/test_sqlalchemy.py b/tests/utils/test_sqlalchemy.py index 4352898090fec..42717a4fb045a 100644 --- a/tests/utils/test_sqlalchemy.py +++ b/tests/utils/test_sqlalchemy.py @@ -42,7 +42,8 @@ ) from airflow.utils.state import State from airflow.utils.timezone import utcnow -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/utils/test_state.py b/tests/utils/test_state.py index 9356a9bbc54b5..6447477ef589b 100644 --- a/tests/utils/test_state.py +++ b/tests/utils/test_state.py @@ -26,7 +26,8 @@ from airflow.utils.state import DagRunState from airflow.utils.types import DagRunType from tests.models import DEFAULT_DATE -from 
tests.test_utils.compat import AIRFLOW_V_3_0_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/utils/test_task_group.py b/tests/utils/test_task_group.py index a6008dc58c03f..6ea0fcdc36e13 100644 --- a/tests/utils/test_task_group.py +++ b/tests/utils/test_task_group.py @@ -39,7 +39,8 @@ from airflow.utils.dag_edges import dag_edges from airflow.utils.task_group import TASKGROUP_ARGS_EXPECTED_TYPES, TaskGroup, task_group_to_dict from tests.models import DEFAULT_DATE -from tests.test_utils.compat import BashOperator + +from dev.tests_common.test_utils.compat import BashOperator def make_task(name, type_="classic"): @@ -1414,7 +1415,7 @@ def test_task_group_edge_modifier_chain(): def test_mapped_task_group_id_prefix_task_id(): - from tests.test_utils.mock_operators import MockOperator + from dev.tests_common.test_utils.mock_operators import MockOperator with DAG(dag_id="d", schedule=None, start_date=DEFAULT_DATE) as dag: t1 = MockOperator.partial(task_id="t1").expand(arg1=[]) diff --git a/tests/utils/test_task_handler_with_custom_formatter.py b/tests/utils/test_task_handler_with_custom_formatter.py index a328253b7f83b..aee646d858b15 100644 --- a/tests/utils/test_task_handler_with_custom_formatter.py +++ b/tests/utils/test_task_handler_with_custom_formatter.py @@ -28,9 +28,10 @@ from airflow.utils.state import DagRunState from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_runs + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_runs if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/utils/test_types.py b/tests/utils/test_types.py index a338f6e65887f..cb5849b20599a 100644 --- a/tests/utils/test_types.py +++ b/tests/utils/test_types.py @@ -26,7 +26,8 @@ from airflow.utils.state import State from airflow.utils.types import DagRunType from tests.models import DEFAULT_DATE -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/www/test_app.py b/tests/www/test_app.py index 053fd1174dcae..673abd371dfb5 100644 --- a/tests/www/test_app.py +++ b/tests/www/test_app.py @@ -29,8 +29,9 @@ from airflow.exceptions import AirflowConfigException from airflow.www import app as application -from tests.test_utils.config import conf_vars -from tests.test_utils.decorators import dont_initialize_flask_app_submodules + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/www/test_utils.py b/tests/www/test_utils.py index 2ba1364328943..f8f0ae983382e 100644 --- a/tests/www/test_utils.py +++ b/tests/www/test_utils.py @@ -18,7 +18,6 @@ from __future__ import annotations import itertools -import os import re import time from datetime import datetime @@ -45,7 +44,8 @@ wrapped_markdown, ) from airflow.www.widgets import AirflowDateTimePickerROWidget, BS3TextAreaROWidget, BS3TextFieldROWidget -from 
tests.test_utils.compat import AIRFLOW_V_3_0_PLUS + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType @@ -712,7 +712,3 @@ class TestForm(FlaskForm): assert 'readonly="true"' in html_output assert "form-control" in html_output - - -def is_db_isolation_mode(): - return os.environ.get("RUN_TESTS_WITH_DATABASE_ISOLATION", "false").lower() == "true" diff --git a/tests/www/views/conftest.py b/tests/www/views/conftest.py index a4b9145797c9b..faa1cc23a8d36 100644 --- a/tests/www/views/conftest.py +++ b/tests/www/views/conftest.py @@ -27,10 +27,15 @@ from airflow import settings from airflow.models import DagBag from airflow.www.app import create_app -from tests.test_utils.api_connexion_utils import delete_user -from tests.test_utils.config import conf_vars -from tests.test_utils.decorators import dont_initialize_flask_app_submodules -from tests.test_utils.www import client_with_login, client_without_login, client_without_login_as_admin + +from dev.tests_common.test_utils.api_connexion_utils import delete_user +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules +from dev.tests_common.test_utils.www import ( + client_with_login, + client_without_login, + client_without_login_as_admin, +) @pytest.fixture(autouse=True, scope="module") diff --git a/tests/www/views/test_anonymous_as_admin_role.py b/tests/www/views/test_anonymous_as_admin_role.py index b7603d1eae5bb..700f03f8e63c8 100644 --- a/tests/www/views/test_anonymous_as_admin_role.py +++ b/tests/www/views/test_anonymous_as_admin_role.py @@ -23,7 +23,8 @@ from airflow.models import Pool from airflow.utils.session import create_session -from tests.test_utils.www import check_content_in_response + +from dev.tests_common.test_utils.www import check_content_in_response pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_session.py b/tests/www/views/test_session.py index 0ec219aaeb4b3..009917606bda3 100644 --- a/tests/www/views/test_session.py +++ b/tests/www/views/test_session.py @@ -22,8 +22,9 @@ from airflow.exceptions import AirflowConfigException from airflow.www import app -from tests.test_utils.config import conf_vars -from tests.test_utils.decorators import dont_initialize_flask_app_submodules + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views.py b/tests/www/views/test_views.py index b51fd3ab1c0a5..bae022316c73d 100644 --- a/tests/www/views/test_views.py +++ b/tests/www/views/test_views.py @@ -42,10 +42,11 @@ get_task_stats_from_query, get_value_from_path, ) -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars -from tests.test_utils.mock_plugins import mock_plugin_manager -from tests.test_utils.www import check_content_in_response, check_content_not_in_response + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager +from dev.tests_common.test_utils.www import check_content_in_response, check_content_not_in_response if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType @@ -329,7 +330,8 @@ def test_mark_task_instance_state(test_app): from airflow.utils.timezone 
import datetime from airflow.utils.types import DagRunType from airflow.www.views import Airflow - from tests.test_utils.db import clear_db_runs + + from dev.tests_common.test_utils.db import clear_db_runs clear_db_runs() start_date = datetime(2020, 1, 1) @@ -422,7 +424,8 @@ def test_mark_task_group_state(test_app): from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType from airflow.www.views import Airflow - from tests.test_utils.db import clear_db_runs + + from dev.tests_common.test_utils.db import clear_db_runs clear_db_runs() start_date = datetime(2020, 1, 1) diff --git a/tests/www/views/test_views_acl.py b/tests/www/views/test_views_acl.py index 7b60a2c11ecad..23fb26c18cf98 100644 --- a/tests/www/views/test_views_acl.py +++ b/tests/www/views/test_views_acl.py @@ -30,11 +30,16 @@ from airflow.utils.state import State from airflow.utils.types import DagRunType from airflow.www.views import FILTER_STATUS_COOKIE -from tests.test_utils.api_connexion_utils import create_user_scope -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.db import clear_db_runs -from tests.test_utils.permissions import _resource_name -from tests.test_utils.www import check_content_in_response, check_content_not_in_response, client_with_login + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import clear_db_runs +from dev.tests_common.test_utils.permissions import _resource_name +from dev.tests_common.test_utils.www import ( + check_content_in_response, + check_content_not_in_response, + client_with_login, +) +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user_scope if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/www/views/test_views_base.py b/tests/www/views/test_views_base.py index a125ca2d72835..692f0301e2276 100644 --- a/tests/www/views/test_views_base.py +++ b/tests/www/views/test_views_base.py @@ -27,9 +27,10 @@ from airflow.utils import timezone from airflow.utils.session import create_session from airflow.www import app as application -from tests.test_utils.asserts import assert_queries_count -from tests.test_utils.config import conf_vars -from tests.test_utils.www import check_content_in_response, check_content_not_in_response + +from dev.tests_common.test_utils.asserts import assert_queries_count +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.www import check_content_in_response, check_content_not_in_response pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_cluster_activity.py b/tests/www/views/test_views_cluster_activity.py index 543d563e8da03..6b83de3c7682a 100644 --- a/tests/www/views/test_views_cluster_activity.py +++ b/tests/www/views/test_views_cluster_activity.py @@ -26,7 +26,8 @@ from airflow.operators.empty import EmptyOperator from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.types import DagRunType -from tests.test_utils.db import clear_db_runs + +from dev.tests_common.test_utils.db import clear_db_runs pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_configuration.py b/tests/www/views/test_views_configuration.py index e874cf3267916..90ab9b7faa933 100644 --- a/tests/www/views/test_views_configuration.py +++ b/tests/www/views/test_views_configuration.py @@ -21,8 +21,9 @@ import pytest from airflow.configuration import conf -from tests.test_utils.config import conf_vars 
-from tests.test_utils.www import check_content_in_response, check_content_not_in_response + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.www import check_content_in_response, check_content_not_in_response pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_connection.py b/tests/www/views/test_views_connection.py index 7530a1f677f9b..66e571780eb96 100644 --- a/tests/www/views/test_views_connection.py +++ b/tests/www/views/test_views_connection.py @@ -27,7 +27,12 @@ from airflow.models import Connection from airflow.utils.session import create_session from airflow.www.views import ConnectionFormWidget, ConnectionModelView -from tests.test_utils.www import _check_last_log, _check_last_log_masked_connection, check_content_in_response + +from dev.tests_common.test_utils.www import ( + _check_last_log, + _check_last_log_masked_connection, + check_content_in_response, +) pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_custom_user_views.py b/tests/www/views/test_views_custom_user_views.py index 84947a8e5f36f..49ecc2c4f30b2 100644 --- a/tests/www/views/test_views_custom_user_views.py +++ b/tests/www/views/test_views_custom_user_views.py @@ -27,11 +27,16 @@ from airflow import settings from airflow.security import permissions from airflow.www import app as application -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import ( - create_user as create_user, + +from dev.tests_common.test_utils.www import ( + check_content_in_response, + check_content_not_in_response, + client_with_login, +) +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import ( + create_user, delete_role, ) -from tests.test_utils.www import check_content_in_response, check_content_not_in_response, client_with_login pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_dagrun.py b/tests/www/views/test_views_dagrun.py index 9e45464c28aa1..3fb8eb02fb633 100644 --- a/tests/www/views/test_views_dagrun.py +++ b/tests/www/views/test_views_dagrun.py @@ -24,14 +24,19 @@ from airflow.utils import timezone from airflow.utils.session import create_session from airflow.www.views import DagRunModelView -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import ( +from tests.www.views.test_views_tasks import _get_appbuilder_pk_string + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.www import ( + check_content_in_response, + check_content_not_in_response, + client_with_login, +) +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import ( create_user, delete_roles, delete_user, ) -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.www import check_content_in_response, check_content_not_in_response, client_with_login -from tests.www.views.test_views_tasks import _get_appbuilder_pk_string if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/www/views/test_views_dataset.py b/tests/www/views/test_views_dataset.py index 3db16a996862e..899d64e9754ed 100644 --- a/tests/www/views/test_views_dataset.py +++ b/tests/www/views/test_views_dataset.py @@ -24,8 +24,9 @@ from airflow.assets import Asset from airflow.models.asset import AssetEvent, AssetModel from airflow.operators.empty import EmptyOperator -from tests.test_utils.asserts import assert_queries_count -from tests.test_utils.db import clear_db_assets + +from 
dev.tests_common.test_utils.asserts import assert_queries_count +from dev.tests_common.test_utils.db import clear_db_assets pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_decorators.py b/tests/www/views/test_views_decorators.py index 86bce2f3bbbc2..bdc402989fd0c 100644 --- a/tests/www/views/test_views_decorators.py +++ b/tests/www/views/test_views_decorators.py @@ -23,9 +23,14 @@ from airflow.utils import timezone from airflow.utils.state import State from airflow.utils.types import DagRunType -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.db import clear_db_runs, clear_db_variables -from tests.test_utils.www import _check_last_log, _check_last_log_masked_variable, check_content_in_response + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.db import clear_db_runs, clear_db_variables +from dev.tests_common.test_utils.www import ( + _check_last_log, + _check_last_log_masked_variable, + check_content_in_response, +) if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/www/views/test_views_extra_links.py b/tests/www/views/test_views_extra_links.py index 4fb0d2d40b017..fbc11bf51f337 100644 --- a/tests/www/views/test_views_extra_links.py +++ b/tests/www/views/test_views_extra_links.py @@ -28,9 +28,10 @@ from airflow.utils import timezone from airflow.utils.state import DagRunState from airflow.utils.types import DagRunType -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS, BaseOperatorLink -from tests.test_utils.db import clear_db_runs -from tests.test_utils.mock_operators import AirflowLink, Dummy2TestOperator, Dummy3TestOperator + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, BaseOperatorLink +from dev.tests_common.test_utils.db import clear_db_runs +from dev.tests_common.test_utils.mock_operators import AirflowLink, Dummy2TestOperator, Dummy3TestOperator if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/www/views/test_views_grid.py b/tests/www/views/test_views_grid.py index 8d5e4e22e04b8..4f05f2a13456b 100644 --- a/tests/www/views/test_views_grid.py +++ b/tests/www/views/test_views_grid.py @@ -35,9 +35,10 @@ from airflow.utils.task_group import TaskGroup from airflow.utils.types import DagRunType from airflow.www.views import dag_to_grid -from tests.test_utils.asserts import assert_queries_count -from tests.test_utils.db import clear_db_assets, clear_db_runs -from tests.test_utils.mock_operators import MockOperator + +from dev.tests_common.test_utils.asserts import assert_queries_count +from dev.tests_common.test_utils.db import clear_db_assets, clear_db_runs +from dev.tests_common.test_utils.mock_operators import MockOperator pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_home.py b/tests/www/views/test_views_home.py index 6a5bf3c4ad531..4de032c58176e 100644 --- a/tests/www/views/test_views_home.py +++ b/tests/www/views/test_views_home.py @@ -27,10 +27,15 @@ from airflow.utils.state import State from airflow.www.utils import UIAlert from airflow.www.views import FILTER_LASTRUN_COOKIE, FILTER_STATUS_COOKIE, FILTER_TAGS_COOKIE -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user -from tests.test_utils.db import clear_db_dags, clear_db_import_errors, clear_db_serialized_dags -from tests.test_utils.permissions import _resource_name -from tests.test_utils.www import check_content_in_response, 
check_content_not_in_response, client_with_login + +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_import_errors, clear_db_serialized_dags +from dev.tests_common.test_utils.permissions import _resource_name +from dev.tests_common.test_utils.www import ( + check_content_in_response, + check_content_not_in_response, + client_with_login, +) +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_log.py b/tests/www/views/test_views_log.py index 72e282c7b3f8c..c025ebcbbe8ed 100644 --- a/tests/www/views/test_views_log.py +++ b/tests/www/views/test_views_log.py @@ -40,11 +40,12 @@ from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.types import DagRunType from airflow.www.app import create_app -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_dags, clear_db_runs -from tests.test_utils.decorators import dont_initialize_flask_app_submodules -from tests.test_utils.www import client_with_login + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs +from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules +from dev.tests_common.test_utils.www import client_with_login if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/www/views/test_views_mount.py b/tests/www/views/test_views_mount.py index f0c052294b60a..e58675439ec12 100644 --- a/tests/www/views/test_views_mount.py +++ b/tests/www/views/test_views_mount.py @@ -22,7 +22,8 @@ import werkzeug.wrappers from airflow.www.app import create_app -from tests.test_utils.config import conf_vars + +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_paused.py b/tests/www/views/test_views_paused.py index 46b0a3aa03f1a..551cdd180e078 100644 --- a/tests/www/views/test_views_paused.py +++ b/tests/www/views/test_views_paused.py @@ -19,7 +19,8 @@ import pytest from airflow.models.log import Log -from tests.test_utils.db import clear_db_dags + +from dev.tests_common.test_utils.db import clear_db_dags pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_pool.py b/tests/www/views/test_views_pool.py index 020c5353a5f34..9b9ddfaeeb79e 100644 --- a/tests/www/views/test_views_pool.py +++ b/tests/www/views/test_views_pool.py @@ -23,7 +23,8 @@ from airflow.models import Pool from airflow.utils.session import create_session -from tests.test_utils.www import check_content_in_response, check_content_not_in_response + +from dev.tests_common.test_utils.www import check_content_in_response, check_content_not_in_response pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_rate_limit.py b/tests/www/views/test_views_rate_limit.py index 1f3eaf11d2ad0..d843c8ed54b09 100644 --- a/tests/www/views/test_views_rate_limit.py +++ b/tests/www/views/test_views_rate_limit.py @@ -20,9 +20,10 @@ import pytest from airflow.www.app import create_app -from tests.test_utils.config import conf_vars -from tests.test_utils.decorators import dont_initialize_flask_app_submodules -from tests.test_utils.www import client_with_login + +from dev.tests_common.test_utils.config import conf_vars +from 
dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules +from dev.tests_common.test_utils.www import client_with_login pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_rendered.py b/tests/www/views/test_views_rendered.py index 87e693d5400ab..6d96cdc4d625f 100644 --- a/tests/www/views/test_views_rendered.py +++ b/tests/www/views/test_views_rendered.py @@ -34,10 +34,15 @@ from airflow.utils.session import create_session from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.types import DagRunType -from tests.conftest import initial_db_init -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS, BashOperator -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_rendered_ti_fields -from tests.test_utils.www import check_content_in_response, check_content_not_in_response + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, BashOperator +from dev.tests_common.test_utils.db import ( + clear_db_dags, + clear_db_runs, + clear_rendered_ti_fields, + initial_db_init, +) +from dev.tests_common.test_utils.www import check_content_in_response, check_content_not_in_response if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/www/views/test_views_robots.py b/tests/www/views/test_views_robots.py index 03d8547c04d4b..41f963b3bf218 100644 --- a/tests/www/views/test_views_robots.py +++ b/tests/www/views/test_views_robots.py @@ -18,7 +18,7 @@ import pytest -from tests.test_utils.config import conf_vars +from dev.tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_task_norun.py b/tests/www/views/test_views_task_norun.py index 2a39b2a60134e..2ff6ff9d7cd67 100644 --- a/tests/www/views/test_views_task_norun.py +++ b/tests/www/views/test_views_task_norun.py @@ -22,7 +22,7 @@ import pytest -from tests.test_utils.db import clear_db_runs +from dev.tests_common.test_utils.db import clear_db_runs pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_tasks.py b/tests/www/views/test_views_tasks.py index 4dcb7252a3658..4e9de5c00fc9c 100644 --- a/tests/www/views/test_views_tasks.py +++ b/tests/www/views/test_views_tasks.py @@ -43,15 +43,20 @@ from airflow.utils.state import DagRunState, State from airflow.utils.types import DagRunType from airflow.www.views import TaskInstanceModelView, _safe_parse_datetime -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import ( + +from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, BashOperator +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.db import clear_db_runs, clear_db_xcom +from dev.tests_common.test_utils.www import ( + check_content_in_response, + check_content_not_in_response, + client_with_login, +) +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import ( create_user, delete_roles, delete_user, ) -from tests.test_utils.compat import AIRFLOW_V_3_0_PLUS, BashOperator -from tests.test_utils.config import conf_vars -from tests.test_utils.db import clear_db_runs, clear_db_xcom -from tests.test_utils.www import check_content_in_response, check_content_not_in_response, client_with_login if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/www/views/test_views_trigger_dag.py b/tests/www/views/test_views_trigger_dag.py index 0c9384a195f5e..d2c3817ede6db 100644 --- 
a/tests/www/views/test_views_trigger_dag.py +++ b/tests/www/views/test_views_trigger_dag.py @@ -32,9 +32,10 @@ from airflow.utils.json import WebEncoder from airflow.utils.session import create_session from airflow.utils.types import DagRunType -from tests.test_utils.api_connexion_utils import create_test_client -from tests.test_utils.config import conf_vars -from tests.test_utils.www import check_content_in_response + +from dev.tests_common.test_utils.config import conf_vars +from dev.tests_common.test_utils.www import check_content_in_response +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_test_client pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_variable.py b/tests/www/views/test_views_variable.py index b7fa8b37c52c8..dae1f2e9b7315 100644 --- a/tests/www/views/test_views_variable.py +++ b/tests/www/views/test_views_variable.py @@ -25,13 +25,14 @@ from airflow.models import Variable from airflow.security import permissions from airflow.utils.session import create_session -from tests.providers.fab.auth_manager.api_endpoints.api_connexion_utils import create_user -from tests.test_utils.www import ( + +from dev.tests_common.test_utils.www import ( _check_last_log, check_content_in_response, check_content_not_in_response, client_with_login, ) +from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user pytestmark = pytest.mark.db_test VARIABLE = { From 1114ab2d03c4c58a68f0a75f03c82a39385c60d0 Mon Sep 17 00:00:00 2001 From: sonu4578 Date: Wed, 9 Oct 2024 13:03:40 -0700 Subject: [PATCH 047/125] Improving validation of task retries to handle None values (#42532) * Improving validation of task retries to handle None values * Updated the validation check to use the "or" logical operator Co-authored-by: Jens Scheffler <95105677+jscheffl@users.noreply.github.com> * Added a comment for the new validation check * Update baseoperator.py * Update taskinstance.py --------- Co-authored-by: Jens Scheffler <95105677+jscheffl@users.noreply.github.com> --- airflow/models/baseoperator.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/airflow/models/baseoperator.py b/airflow/models/baseoperator.py index 9e0c8e1e69b61..7b5c8e7c4f8c4 100644 --- a/airflow/models/baseoperator.py +++ b/airflow/models/baseoperator.py @@ -127,7 +127,9 @@ def parse_retries(retries: Any) -> int | None: - if retries is None or type(retries) == int: # noqa: E721 + if retries is None: + return 0 + elif type(retries) == int: # noqa: E721 return retries try: parsed_retries = int(retries) From 19c32b9e0dffd193414ac0a9ad41a16b6020ce5b Mon Sep 17 00:00:00 2001 From: Kaxil Naik Date: Wed, 9 Oct 2024 21:31:26 +0100 Subject: [PATCH 048/125] Fix deprecated stage names for Pre-commit (#42872) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Before ``` ❯ pre-commit install [WARNING] top-level `default_stages` uses deprecated stage names (commit, push) which will be removed in a future version. run: `pre-commit migrate-config` to automatically fix this. pre-commit installed at .git/hooks/pre-commit ``` --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 748a85e860bb7..394e11ac20502 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -15,7 +15,7 @@ # specific language governing permissions and limitations # under the License. 
--- -default_stages: [commit, push] +default_stages: [pre-commit, pre-push] default_language_version: python: python3 node: 22.2.0 From 647845b0d1f1394bd06da39bfe1992d84ac0942a Mon Sep 17 00:00:00 2001 From: Pierre Jeambrun Date: Thu, 10 Oct 2024 04:32:28 +0800 Subject: [PATCH 049/125] Fix ui lint pre-commit interactive mode (#42854) --- scripts/ci/pre_commit/compile_ui_assets.py | 5 ++++- scripts/ci/pre_commit/compile_ui_assets_dev.py | 2 +- scripts/ci/pre_commit/lint_ui.py | 4 +++- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/scripts/ci/pre_commit/compile_ui_assets.py b/scripts/ci/pre_commit/compile_ui_assets.py index 9189981069764..b4c38e3d7cc60 100755 --- a/scripts/ci/pre_commit/compile_ui_assets.py +++ b/scripts/ci/pre_commit/compile_ui_assets.py @@ -68,7 +68,10 @@ def get_directory_hash(directory: Path, skip_path_regexp: str | None = None) -> shutil.rmtree(dist_directory, ignore_errors=True) env = os.environ.copy() env["FORCE_COLOR"] = "true" - subprocess.check_call(["pnpm", "install", "--frozen-lockfile"], cwd=os.fspath(ui_directory)) + subprocess.check_call( + ["pnpm", "install", "--frozen-lockfile", "--config.confirmModulesPurge=false"], + cwd=os.fspath(ui_directory), + ) subprocess.check_call(["pnpm", "run", "build"], cwd=os.fspath(ui_directory), env=env) new_hash = get_directory_hash(ui_directory, skip_path_regexp=r".*node_modules.*") UI_HASH_FILE.write_text(new_hash) diff --git a/scripts/ci/pre_commit/compile_ui_assets_dev.py b/scripts/ci/pre_commit/compile_ui_assets_dev.py index 02b2e580946af..d820db8701eba 100755 --- a/scripts/ci/pre_commit/compile_ui_assets_dev.py +++ b/scripts/ci/pre_commit/compile_ui_assets_dev.py @@ -49,7 +49,7 @@ UI_ASSET_OUT_FILE.unlink(missing_ok=True) with open(UI_ASSET_OUT_DEV_MODE_FILE, "w") as f: subprocess.run( - ["pnpm", "install", "--frozen-lockfile"], + ["pnpm", "install", "--frozen-lockfile", "--config.confirmModulesPurge=false"], cwd=os.fspath(ui_directory), check=True, stdout=f, diff --git a/scripts/ci/pre_commit/lint_ui.py b/scripts/ci/pre_commit/lint_ui.py index 9604db170ae91..bac91b6dfc20f 100755 --- a/scripts/ci/pre_commit/lint_ui.py +++ b/scripts/ci/pre_commit/lint_ui.py @@ -29,7 +29,9 @@ if __name__ == "__main__": dir = Path("airflow") / "ui" subprocess.check_call(["pnpm", "config", "set", "store-dir", ".pnpm-store"], cwd=dir) - subprocess.check_call(["pnpm", "install", "--frozen-lockfile"], cwd=dir) + subprocess.check_call( + ["pnpm", "install", "--frozen-lockfile", "--config.confirmModulesPurge=false"], cwd=dir + ) subprocess.check_call(["pnpm", "codegen"], cwd=dir) subprocess.check_call(["pnpm", "format"], cwd=dir) subprocess.check_call(["pnpm", "lint:fix"], cwd=dir) From 8a4e7bda845bab4dfe33af0a8b0c9e6fcf0da9f3 Mon Sep 17 00:00:00 2001 From: GPK Date: Wed, 9 Oct 2024 21:54:19 +0100 Subject: [PATCH 050/125] update k8s tests urllib3 retry config status_forcelist and allowed_methods (#42871) --- kubernetes_tests/test_base.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/kubernetes_tests/test_base.py b/kubernetes_tests/test_base.py index 0f5b673db2f93..ac311daa52f8c 100644 --- a/kubernetes_tests/test_base.py +++ b/kubernetes_tests/test_base.py @@ -123,7 +123,12 @@ def _delete_airflow_pod(name=""): def _get_session_with_retries(self): session = requests.Session() session.auth = ("admin", "admin") - retries = Retry(total=3, backoff_factor=1) + retries = Retry( + total=3, + backoff_factor=1, + status_forcelist=[404], + allowed_methods=Retry.DEFAULT_ALLOWED_METHODS | frozenset(["PATCH", "POST"]), 
+ ) session.mount("http://", HTTPAdapter(max_retries=retries)) session.mount("https://", HTTPAdapter(max_retries=retries)) return session From 35f7076da64aa85c9ba5cb46947643cf8a389f3d Mon Sep 17 00:00:00 2001 From: Kaxil Naik Date: Wed, 9 Oct 2024 21:59:36 +0100 Subject: [PATCH 051/125] Bump `blacken-docs` pre-commit (#42880) Bump `blacken-docs` pre-commit from 1.18.0 -> 1.19.0 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 394e11ac20502..d492747856a92 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -227,7 +227,7 @@ repos: additional_dependencies: ["libcst>=1.1.0"] files: ^(providers/src/)?airflow/.*/(sensors|operators)/.*\.py$ - repo: https://github.com/asottile/blacken-docs - rev: 1.18.0 + rev: 1.19.0 hooks: - id: blacken-docs name: Run black on docs From 54005f87d42f9b349f7484406c858f18be04b807 Mon Sep 17 00:00:00 2001 From: Kaxil Naik Date: Wed, 9 Oct 2024 23:02:42 +0100 Subject: [PATCH 052/125] Exclude "not-ready" providers when building docs (#42873) This fixes the issue we saw in https://apache-airflow.slack.com/archives/C03G9H97MM2/p1728492002477169 This PR excludes the providers that aren't ready yet for building docs. --- .../authoring-and-scheduling/{assets.rst => datasets.rst} | 0 docs/apache-airflow/authoring-and-scheduling/index.rst | 2 +- docs/apache-airflow/templates-ref.rst | 6 +++--- docs/exts/provider_yaml_utils.py | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) rename docs/apache-airflow/authoring-and-scheduling/{assets.rst => datasets.rst} (100%) diff --git a/docs/apache-airflow/authoring-and-scheduling/assets.rst b/docs/apache-airflow/authoring-and-scheduling/datasets.rst similarity index 100% rename from docs/apache-airflow/authoring-and-scheduling/assets.rst rename to docs/apache-airflow/authoring-and-scheduling/datasets.rst diff --git a/docs/apache-airflow/authoring-and-scheduling/index.rst b/docs/apache-airflow/authoring-and-scheduling/index.rst index 5ec94d6ca7301..28cc46ab0617a 100644 --- a/docs/apache-airflow/authoring-and-scheduling/index.rst +++ b/docs/apache-airflow/authoring-and-scheduling/index.rst @@ -41,5 +41,5 @@ It's recommended that you first review the pages in :doc:`core concepts timetable diff --git a/docs/apache-airflow/templates-ref.rst b/docs/apache-airflow/templates-ref.rst index 5524c82f8cc95..e3de362191340 100644 --- a/docs/apache-airflow/templates-ref.rst +++ b/docs/apache-airflow/templates-ref.rst @@ -62,10 +62,10 @@ Variable Type Description ``{{ prev_end_date_success }}`` `pendulum.DateTime`_ End date from prior successful :class:`~airflow.models.dagrun.DagRun` (if available). | ``None`` ``{{ inlets }}`` list List of inlets declared on the task. -``{{ inlet_events }}`` dict[str, ...] Access past events of inlet assets. See :doc:`Assets `. Added in version 2.10. +``{{ inlet_events }}`` dict[str, ...] Access past events of inlet assets. See :doc:`Assets `. Added in version 2.10. ``{{ outlets }}`` list List of outlets declared on the task. ``{{ outlet_events }}`` dict[str, ...] | Accessors to attach information to asset events that will be emitted by the current task. - | See :doc:`Assets `. Added in version 2.10. + | See :doc:`Assets `. Added in version 2.10. ``{{ dag }}`` DAG The currently running :class:`~airflow.models.dag.DAG`. You can read more about DAGs in :doc:`DAGs `. ``{{ task }}`` BaseOperator | The currently running :class:`~airflow.models.baseoperator.BaseOperator`. 
You can read more about Tasks in :doc:`core-concepts/operators` ``{{ macros }}`` | A reference to the macros package. See Macros_ below. @@ -90,7 +90,7 @@ Variable Type Description | Added in version 2.5. ``{{ triggering_asset_events }}`` dict[str, | If in a Asset Scheduled DAG, a map of Asset URI to a list of triggering :class:`~airflow.models.asset.AssetEvent` list[AssetEvent]] | (there may be more than one, if there are multiple Assets with different frequencies). - | Read more here :doc:`Assets `. + | Read more here :doc:`Assets `. | Added in version 2.4. =========================================== ===================== =================================================================== diff --git a/docs/exts/provider_yaml_utils.py b/docs/exts/provider_yaml_utils.py index 6e5d3a835e1d0..ad99da3c10eaf 100644 --- a/docs/exts/provider_yaml_utils.py +++ b/docs/exts/provider_yaml_utils.py @@ -70,7 +70,7 @@ def load_package_data(include_suspended: bool = False) -> list[dict[str, Any]]: except jsonschema.ValidationError as ex: msg = f"Unable to parse: {provider_yaml_path}. Original error {type(ex).__name__}: {ex}" raise RuntimeError(msg) - if provider["state"] == "suspended" and not include_suspended: + if provider["state"] in ["suspended", "not-ready"] and not include_suspended: continue provider_yaml_dir = os.path.dirname(provider_yaml_path) provider["python-module"] = _filepath_to_module(provider_yaml_dir) From 420b24aa798f73bd6fa8c525e80958e9c9e85dec Mon Sep 17 00:00:00 2001 From: GPK Date: Thu, 10 Oct 2024 06:50:43 +0100 Subject: [PATCH 053/125] Move Hooks to Standard provider (#42794) --- airflow/providers_manager.py | 4 +- airflow/sensors/filesystem.py | 2 +- .../logging-monitoring/errors.rst | 2 +- .../operators-and-hooks-ref.rst | 4 +- .../providers/standard/hooks/__init__.py | 16 ++++++++ .../providers/standard}/hooks/filesystem.py | 0 .../standard}/hooks/package_index.py | 0 .../providers/standard}/hooks/subprocess.py | 4 +- .../providers/standard/operators/bash.py | 2 +- .../airflow/providers/standard/provider.yaml | 7 ++++ providers/tests/standard/hooks/__init__.py | 16 ++++++++ .../tests/standard/hooks/test_filesystem.py | 39 +++++++++++++++++++ .../standard}/hooks/test_package_index.py | 6 +-- .../tests/standard}/hooks/test_subprocess.py | 6 +-- tests/sensors/test_filesystem.py | 2 +- 15 files changed, 94 insertions(+), 16 deletions(-) create mode 100644 providers/src/airflow/providers/standard/hooks/__init__.py rename {airflow => providers/src/airflow/providers/standard}/hooks/filesystem.py (100%) rename {airflow => providers/src/airflow/providers/standard}/hooks/package_index.py (100%) rename {airflow => providers/src/airflow/providers/standard}/hooks/subprocess.py (96%) create mode 100644 providers/tests/standard/hooks/__init__.py create mode 100644 providers/tests/standard/hooks/test_filesystem.py rename {tests => providers/tests/standard}/hooks/test_package_index.py (93%) rename {tests => providers/tests/standard}/hooks/test_subprocess.py (95%) diff --git a/airflow/providers_manager.py b/airflow/providers_manager.py index 1b1ca469f21d5..573d256d6e59a 100644 --- a/airflow/providers_manager.py +++ b/airflow/providers_manager.py @@ -36,8 +36,8 @@ from packaging.utils import canonicalize_name from airflow.exceptions import AirflowOptionalProviderFeatureException -from airflow.hooks.filesystem import FSHook -from airflow.hooks.package_index import PackageIndexHook +from airflow.providers.standard.hooks.filesystem import FSHook +from 
airflow.providers.standard.hooks.package_index import PackageIndexHook from airflow.typing_compat import ParamSpec from airflow.utils import yaml from airflow.utils.entry_points import entry_points_with_dist diff --git a/airflow/sensors/filesystem.py b/airflow/sensors/filesystem.py index 5d32ab07ad4e7..4496f5d6abfa4 100644 --- a/airflow/sensors/filesystem.py +++ b/airflow/sensors/filesystem.py @@ -25,7 +25,7 @@ from airflow.configuration import conf from airflow.exceptions import AirflowException -from airflow.hooks.filesystem import FSHook +from airflow.providers.standard.hooks.filesystem import FSHook from airflow.sensors.base import BaseSensorOperator from airflow.triggers.base import StartTriggerArgs from airflow.triggers.file import FileTrigger diff --git a/docs/apache-airflow/administration-and-deployment/logging-monitoring/errors.rst b/docs/apache-airflow/administration-and-deployment/logging-monitoring/errors.rst index cb09843422321..0ad3fa8c5127a 100644 --- a/docs/apache-airflow/administration-and-deployment/logging-monitoring/errors.rst +++ b/docs/apache-airflow/administration-and-deployment/logging-monitoring/errors.rst @@ -96,7 +96,7 @@ Impact of Sentry on Environment variables passed to Subprocess Hook When Sentry is enabled, by default it changes the standard library to pass all environment variables to subprocesses opened by Airflow. This changes the default behaviour of -:class:`airflow.hooks.subprocess.SubprocessHook` - always all environment variables are passed to the +:class:`airflow.providers.standard.hooks.subprocess.SubprocessHook` - always all environment variables are passed to the subprocess executed with specific set of environment variables. In this case not only the specified environment variables are passed but also all existing environment variables are passed with ``SUBPROCESS_`` prefix added. This happens also for all other subprocesses. diff --git a/docs/apache-airflow/operators-and-hooks-ref.rst b/docs/apache-airflow/operators-and-hooks-ref.rst index c82a4f3a66d73..655551705e41e 100644 --- a/docs/apache-airflow/operators-and-hooks-ref.rst +++ b/docs/apache-airflow/operators-and-hooks-ref.rst @@ -106,8 +106,8 @@ For details see: :doc:`apache-airflow-providers:operators-and-hooks-ref/index`. * - Hooks - Guides - * - :mod:`airflow.hooks.filesystem` + * - :mod:`airflow.providers.standard.hooks.filesystem` - - * - :mod:`airflow.hooks.subprocess` + * - :mod:`airflow.providers.standard.hooks.subprocess` - diff --git a/providers/src/airflow/providers/standard/hooks/__init__.py b/providers/src/airflow/providers/standard/hooks/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/src/airflow/providers/standard/hooks/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/hooks/filesystem.py b/providers/src/airflow/providers/standard/hooks/filesystem.py similarity index 100% rename from airflow/hooks/filesystem.py rename to providers/src/airflow/providers/standard/hooks/filesystem.py diff --git a/airflow/hooks/package_index.py b/providers/src/airflow/providers/standard/hooks/package_index.py similarity index 100% rename from airflow/hooks/package_index.py rename to providers/src/airflow/providers/standard/hooks/package_index.py diff --git a/airflow/hooks/subprocess.py b/providers/src/airflow/providers/standard/hooks/subprocess.py similarity index 96% rename from airflow/hooks/subprocess.py rename to providers/src/airflow/providers/standard/hooks/subprocess.py index bc20b5c20b4c5..9e578a7d8034b 100644 --- a/airflow/hooks/subprocess.py +++ b/providers/src/airflow/providers/standard/hooks/subprocess.py @@ -52,8 +52,8 @@ def run_command( :param env: Optional dict containing environment variables to be made available to the shell environment in which ``command`` will be executed. If omitted, ``os.environ`` will be used. Note, that in case you have Sentry configured, original variables from the environment - will also be passed to the subprocess with ``SUBPROCESS_`` prefix. See - :doc:`/administration-and-deployment/logging-monitoring/errors` for details. + will also be passed to the subprocess with ``SUBPROCESS_`` prefix. See: + https://airflow.apache.org/docs/apache-airflow/stable/administration-and-deployment/logging-monitoring/errors.html for details. :param output_encoding: encoding to use for decoding stdout :param cwd: Working directory to run the command in. If None (default), the command is run in a temporary directory. 
diff --git a/providers/src/airflow/providers/standard/operators/bash.py b/providers/src/airflow/providers/standard/operators/bash.py index 2ec0341a0d1e2..bf4a943df6e08 100644 --- a/providers/src/airflow/providers/standard/operators/bash.py +++ b/providers/src/airflow/providers/standard/operators/bash.py @@ -24,8 +24,8 @@ from typing import TYPE_CHECKING, Any, Callable, Container, Sequence, cast from airflow.exceptions import AirflowException, AirflowSkipException -from airflow.hooks.subprocess import SubprocessHook from airflow.models.baseoperator import BaseOperator +from airflow.providers.standard.hooks.subprocess import SubprocessHook from airflow.utils.operator_helpers import context_to_airflow_vars from airflow.utils.types import ArgNotSet diff --git a/providers/src/airflow/providers/standard/provider.yaml b/providers/src/airflow/providers/standard/provider.yaml index 2d4c4f29bef5c..b3111d62b1a66 100644 --- a/providers/src/airflow/providers/standard/provider.yaml +++ b/providers/src/airflow/providers/standard/provider.yaml @@ -52,3 +52,10 @@ sensors: - airflow.providers.standard.sensors.time - airflow.providers.standard.sensors.weekday - airflow.providers.standard.sensors.bash + +hooks: + - integration-name: Standard + python-modules: + - airflow.providers.standard.hooks.filesystem + - airflow.providers.standard.hooks.package_index + - airflow.providers.standard.hooks.subprocess diff --git a/providers/tests/standard/hooks/__init__.py b/providers/tests/standard/hooks/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/tests/standard/hooks/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/providers/tests/standard/hooks/test_filesystem.py b/providers/tests/standard/hooks/test_filesystem.py new file mode 100644 index 0000000000000..bbcd22dc94219 --- /dev/null +++ b/providers/tests/standard/hooks/test_filesystem.py @@ -0,0 +1,39 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +import pytest + +from airflow.providers.standard.hooks.filesystem import FSHook + +pytestmark = pytest.mark.db_test + + +class TestFSHook: + def test_get_ui_field_behaviour(self): + fs_hook = FSHook() + assert fs_hook.get_ui_field_behaviour() == { + "hidden_fields": ["host", "schema", "port", "login", "password", "extra"], + "relabeling": {}, + "placeholders": {}, + } + + def test_get_path(self): + fs_hook = FSHook(fs_conn_id="fs_default") + + assert fs_hook.get_path() == "/" diff --git a/tests/hooks/test_package_index.py b/providers/tests/standard/hooks/test_package_index.py similarity index 93% rename from tests/hooks/test_package_index.py rename to providers/tests/standard/hooks/test_package_index.py index 9da429c5a09cf..6a90db0715d81 100644 --- a/tests/hooks/test_package_index.py +++ b/providers/tests/standard/hooks/test_package_index.py @@ -21,8 +21,8 @@ import pytest -from airflow.hooks.package_index import PackageIndexHook from airflow.models.connection import Connection +from airflow.providers.standard.hooks.package_index import PackageIndexHook class MockConnection(Connection): @@ -73,7 +73,7 @@ def mock_get_connection(monkeypatch: pytest.MonkeyPatch, request: pytest.Fixture password: str | None = testdata.get("password", None) expected_result: str | None = testdata.get("expected_result", None) monkeypatch.setattr( - "airflow.hooks.package_index.PackageIndexHook.get_connection", + "airflow.providers.standard.hooks.package_index.PackageIndexHook.get_connection", lambda *_: MockConnection(host, login, password), ) return expected_result @@ -104,7 +104,7 @@ class MockProc: return MockProc() - monkeypatch.setattr("airflow.hooks.package_index.subprocess.run", mock_run) + monkeypatch.setattr("airflow.providers.standard.hooks.package_index.subprocess.run", mock_run) hook_instance = PackageIndexHook() if mock_get_connection: diff --git a/tests/hooks/test_subprocess.py b/providers/tests/standard/hooks/test_subprocess.py similarity index 95% rename from tests/hooks/test_subprocess.py rename to providers/tests/standard/hooks/test_subprocess.py index 0f625be816887..2b2e9473359e5 100644 --- a/tests/hooks/test_subprocess.py +++ b/providers/tests/standard/hooks/test_subprocess.py @@ -26,7 +26,7 @@ import pytest -from airflow.hooks.subprocess import SubprocessHook +from airflow.providers.standard.hooks.subprocess import SubprocessHook OS_ENV_KEY = "SUBPROCESS_ENV_TEST" OS_ENV_VAL = "this-is-from-os-environ" @@ -81,11 +81,11 @@ def test_return_value(self, val, expected): @mock.patch.dict("os.environ", clear=True) @mock.patch( - "airflow.hooks.subprocess.TemporaryDirectory", + "airflow.providers.standard.hooks.subprocess.TemporaryDirectory", return_value=MagicMock(__enter__=MagicMock(return_value="/tmp/airflowtmpcatcat")), ) @mock.patch( - "airflow.hooks.subprocess.Popen", + "airflow.providers.standard.hooks.subprocess.Popen", return_value=MagicMock(stdout=MagicMock(readline=MagicMock(side_effect=StopIteration), returncode=0)), ) def test_should_exec_subprocess(self, mock_popen, mock_temporary_directory): diff --git a/tests/sensors/test_filesystem.py b/tests/sensors/test_filesystem.py index 1fb123cfe7248..641f2f218f2db 100644 --- a/tests/sensors/test_filesystem.py +++ b/tests/sensors/test_filesystem.py @@ -40,7 +40,7 @@ @pytest.mark.skip_if_database_isolation_mode # Test is broken in db isolation mode class TestFileSensor: def setup_method(self): - from airflow.hooks.filesystem import FSHook + from airflow.providers.standard.hooks.filesystem import FSHook hook 
= FSHook() args = {"owner": "airflow", "start_date": DEFAULT_DATE} From b7c8ebb7d9f0d5a3d1598667371ab69582b4e66a Mon Sep 17 00:00:00 2001 From: Pierre Jeambrun Date: Thu, 10 Oct 2024 15:15:41 +0800 Subject: [PATCH 054/125] Fix dag warning documentation (#42858) --- airflow/api_connexion/openapi/v1.yaml | 2 +- airflow/www/static/js/types/api-generated.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/airflow/api_connexion/openapi/v1.yaml b/airflow/api_connexion/openapi/v1.yaml index 3e120ce2f5118..305653e781797 100644 --- a/airflow/api_connexion/openapi/v1.yaml +++ b/airflow/api_connexion/openapi/v1.yaml @@ -3463,7 +3463,7 @@ components: allOf: - type: object properties: - import_errors: + dag_warnings: type: array items: $ref: "#/components/schemas/DagWarning" diff --git a/airflow/www/static/js/types/api-generated.ts b/airflow/www/static/js/types/api-generated.ts index 87f89d19cd714..6aeb8721e4280 100644 --- a/airflow/www/static/js/types/api-generated.ts +++ b/airflow/www/static/js/types/api-generated.ts @@ -1263,7 +1263,7 @@ export interface components { }; /** @description Collection of DAG warnings. */ DagWarningCollection: { - import_errors?: components["schemas"]["DagWarning"][]; + dag_warnings?: components["schemas"]["DagWarning"][]; } & components["schemas"]["CollectionInfo"]; SetDagRunNote: { /** @description Custom notes left by users for this Dag Run. */ From 7250e2642459ac8d1215c3ab5bdf8786beca51dd Mon Sep 17 00:00:00 2001 From: Elad Kalif <45845474+eladkal@users.noreply.github.com> Date: Thu, 10 Oct 2024 14:22:24 +0700 Subject: [PATCH 055/125] Fix issue generation for provider after folder restructure (#42883) --- .../src/airflow_breeze/commands/release_management_commands.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py index d286e7ed4e660..b0a1ca2877332 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py @@ -137,6 +137,7 @@ run_with_pool, ) from airflow_breeze.utils.path_utils import ( + AIRFLOW_PROVIDERS_SRC, AIRFLOW_SOURCES_ROOT, CONSTRAINTS_CACHE_DIR, DIST_DIR, @@ -2125,7 +2126,7 @@ class ProviderPRInfo(NamedTuple): pull_request_list = [pull_requests[pr] for pr in provider_prs[provider_id] if pr in pull_requests] provider_yaml_dict = yaml.safe_load( ( - AIRFLOW_SOURCES_ROOT + AIRFLOW_PROVIDERS_SRC / "airflow" / "providers" / provider_id.replace(".", os.sep) From 937cc299fe8833a48653b39a7c1ef7aa2a660426 Mon Sep 17 00:00:00 2001 From: Omkar P <45419097+omkar-foss@users.noreply.github.com> Date: Thu, 10 Oct 2024 13:17:49 +0530 Subject: [PATCH 056/125] AIP-84 Migrate the public endpoint Get DAG to FastAPI (#42848) * Migrate the public endpoint Get DAG to FastAPI * Use proper name for test function --- airflow/api_fastapi/openapi/v1-generated.yaml | 62 +++++++++++++++++-- airflow/api_fastapi/serializers/dags.py | 40 +++++++----- airflow/api_fastapi/views/public/dags.py | 20 ++++++ airflow/ui/openapi-gen/queries/common.ts | 16 +++++ airflow/ui/openapi-gen/queries/prefetch.ts | 20 ++++++ airflow/ui/openapi-gen/queries/queries.ts | 26 ++++++++ airflow/ui/openapi-gen/queries/suspense.ts | 26 ++++++++ .../ui/openapi-gen/requests/services.gen.ts | 45 +++++++++++--- airflow/ui/openapi-gen/requests/types.gen.ts | 49 ++++++++++++--- tests/api_fastapi/views/public/test_dags.py | 48 ++++++++++++++ 10 
files changed, 313 insertions(+), 39 deletions(-) diff --git a/airflow/api_fastapi/openapi/v1-generated.yaml b/airflow/api_fastapi/openapi/v1-generated.yaml index 463cc1e92f4d6..fb19c1abd1c12 100644 --- a/airflow/api_fastapi/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/openapi/v1-generated.yaml @@ -291,13 +291,13 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /public/dags/{dag_id}/details: + /public/dags/{dag_id}: get: tags: - DAG - summary: Get Dag Details - description: Get details of DAG. - operationId: get_dag_details + summary: Get Dag + description: Get basic information about a DAG. + operationId: get_dag parameters: - name: dag_id in: path @@ -311,7 +311,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/DAGDetailsResponse' + $ref: '#/components/schemas/DAGResponse' '400': content: application/json: @@ -342,7 +342,6 @@ paths: schema: $ref: '#/components/schemas/HTTPExceptionResponse' description: Unprocessable Entity - /public/dags/{dag_id}: patch: tags: - DAG @@ -409,6 +408,57 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + /public/dags/{dag_id}/details: + get: + tags: + - DAG + summary: Get Dag Details + description: Get details of DAG. + operationId: get_dag_details + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGDetailsResponse' + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unprocessable Entity /public/connections/{connection_id}: delete: tags: diff --git a/airflow/api_fastapi/serializers/dags.py b/airflow/api_fastapi/serializers/dags.py index 17677054c4c1c..9879badf25048 100644 --- a/airflow/api_fastapi/serializers/dags.py +++ b/airflow/api_fastapi/serializers/dags.py @@ -24,9 +24,9 @@ from itsdangerous import URLSafeSerializer from pendulum.tz.timezone import FixedTimezone, Timezone from pydantic import ( - AliasChoices, + AliasGenerator, BaseModel, - Field, + ConfigDict, computed_field, field_validator, ) @@ -77,6 +77,14 @@ def get_owners(cls, v: Any) -> list[str] | None: return v.split(",") return v + @field_validator("timetable_summary", mode="before") + @classmethod + def get_timetable_summary(cls, tts: str | None) -> str | None: + """Validate the string representation of timetable_summary.""" + if tts is None or tts == "None": + return None + return str(tts) + # Mypy issue https://github.com/python/mypy/issues/1362 @computed_field # type: ignore[misc] @property @@ -103,9 +111,7 @@ class DAGDetailsResponse(DAGResponse): """Specific serializer for DAG Details responses.""" catchup: bool - dag_run_timeout: timedelta | None = Field( - validation_alias=AliasChoices("dag_run_timeout", "dagrun_timeout") - ) + dag_run_timeout: timedelta | None dataset_expression: dict | None doc_md: str | None start_date: 
datetime | None @@ -114,11 +120,19 @@ class DAGDetailsResponse(DAGResponse): orientation: str params: abc.MutableMapping | None render_template_as_native_obj: bool - template_search_path: Iterable[str] | None = Field( - validation_alias=AliasChoices("template_search_path", "template_searchpath") - ) + template_search_path: Iterable[str] | None timezone: str | None - last_parsed: datetime | None = Field(validation_alias=AliasChoices("last_parsed", "last_loaded")) + last_parsed: datetime | None + + model_config = ConfigDict( + alias_generator=AliasGenerator( + validation_alias=lambda field_name: { + "dag_run_timeout": "dagrun_timeout", + "last_parsed": "last_loaded", + "template_search_path": "template_searchpath", + }.get(field_name, field_name), + ) + ) @field_validator("timezone", mode="before") @classmethod @@ -128,14 +142,6 @@ def get_timezone(cls, tz: Timezone | FixedTimezone) -> str | None: return None return str(tz) - @field_validator("timetable_summary", mode="before") - @classmethod - def get_timetable_summary(cls, tts: str | None) -> str | None: - """Validate the string representation of timetable_summary.""" - if tts is None or tts == "None": - return None - return str(tts) - @field_validator("params", mode="before") @classmethod def get_params(cls, params: abc.MutableMapping | None) -> dict | None: diff --git a/airflow/api_fastapi/views/public/dags.py b/airflow/api_fastapi/views/public/dags.py index f0df86b787a20..ca0f44162eb2d 100644 --- a/airflow/api_fastapi/views/public/dags.py +++ b/airflow/api_fastapi/views/public/dags.py @@ -92,6 +92,26 @@ async def get_dags( ) +@dags_router.get("/{dag_id}", responses=create_openapi_http_exception_doc([400, 401, 403, 404, 422])) +async def get_dag( + dag_id: str, session: Annotated[Session, Depends(get_session)], request: Request +) -> DAGResponse: + """Get basic information about a DAG.""" + dag: DAG = request.app.state.dag_bag.get_dag(dag_id) + if not dag: + raise HTTPException(404, f"Dag with id {dag_id} was not found") + + dag_model: DagModel = session.get(DagModel, dag_id) + if not dag_model: + raise HTTPException(404, f"Unable to obtain dag with id {dag_id} from session") + + for key, value in dag.__dict__.items(): + if not key.startswith("_") and not hasattr(dag_model, key): + setattr(dag_model, key, value) + + return DAGResponse.model_validate(dag_model, from_attributes=True) + + @dags_router.get("/{dag_id}/details", responses=create_openapi_http_exception_doc([400, 401, 403, 404, 422])) async def get_dag_details( dag_id: str, session: Annotated[Session, Depends(get_session)], request: Request diff --git a/airflow/ui/openapi-gen/queries/common.ts b/airflow/ui/openapi-gen/queries/common.ts index 72fd0ef9ccde1..a4d65c69003f1 100644 --- a/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow/ui/openapi-gen/queries/common.ts @@ -98,6 +98,22 @@ export const UseDagServiceGetDagsKeyFn = ( }, ]), ]; +export type DagServiceGetDagDefaultResponse = Awaited< + ReturnType +>; +export type DagServiceGetDagQueryResult< + TData = DagServiceGetDagDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useDagServiceGetDagKey = "DagServiceGetDag"; +export const UseDagServiceGetDagKeyFn = ( + { + dagId, + }: { + dagId: string; + }, + queryKey?: Array, +) => [useDagServiceGetDagKey, ...(queryKey ?? 
[{ dagId }])]; export type DagServiceGetDagDetailsDefaultResponse = Awaited< ReturnType >; diff --git a/airflow/ui/openapi-gen/queries/prefetch.ts b/airflow/ui/openapi-gen/queries/prefetch.ts index a114b9dc92c6c..8bd691ca33be4 100644 --- a/airflow/ui/openapi-gen/queries/prefetch.ts +++ b/airflow/ui/openapi-gen/queries/prefetch.ts @@ -126,6 +126,26 @@ export const prefetchUseDagServiceGetDags = ( tags, }), }); +/** + * Get Dag + * Get basic information about a DAG. + * @param data The data for the request. + * @param data.dagId + * @returns DAGResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagServiceGetDag = ( + queryClient: QueryClient, + { + dagId, + }: { + dagId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagServiceGetDagKeyFn({ dagId }), + queryFn: () => DagService.getDag({ dagId }), + }); /** * Get Dag Details * Get details of DAG. diff --git a/airflow/ui/openapi-gen/queries/queries.ts b/airflow/ui/openapi-gen/queries/queries.ts index a3ce02257160a..51b8f4fb051d7 100644 --- a/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow/ui/openapi-gen/queries/queries.ts @@ -153,6 +153,32 @@ export const useDagServiceGetDags = < }) as TData, ...options, }); +/** + * Get Dag + * Get basic information about a DAG. + * @param data The data for the request. + * @param data.dagId + * @returns DAGResponse Successful Response + * @throws ApiError + */ +export const useDagServiceGetDag = < + TData = Common.DagServiceGetDagDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + }: { + dagId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagServiceGetDagKeyFn({ dagId }, queryKey), + queryFn: () => DagService.getDag({ dagId }) as TData, + ...options, + }); /** * Get Dag Details * Get details of DAG. diff --git a/airflow/ui/openapi-gen/queries/suspense.ts b/airflow/ui/openapi-gen/queries/suspense.ts index fbef843c6e0ab..b437007468f20 100644 --- a/airflow/ui/openapi-gen/queries/suspense.ts +++ b/airflow/ui/openapi-gen/queries/suspense.ts @@ -148,6 +148,32 @@ export const useDagServiceGetDagsSuspense = < }) as TData, ...options, }); +/** + * Get Dag + * Get basic information about a DAG. + * @param data The data for the request. + * @param data.dagId + * @returns DAGResponse Successful Response + * @throws ApiError + */ +export const useDagServiceGetDagSuspense = < + TData = Common.DagServiceGetDagDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + }: { + dagId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagServiceGetDagKeyFn({ dagId }, queryKey), + queryFn: () => DagService.getDag({ dagId }) as TData, + ...options, + }); /** * Get Dag Details * Get details of DAG. 
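
Behind these generated client hooks sits the new REST route itself. A rough sketch of calling it directly, assuming a webserver at http://localhost:8080 with authentication already handled (both assumptions, not part of this patch):

    import requests

    BASE_URL = "http://localhost:8080"  # placeholder deployment URL

    # GET /public/dags/{dag_id} returns the lighter DAGResponse, while
    # /public/dags/{dag_id}/details returns the fuller DAGDetailsResponse.
    resp = requests.get(f"{BASE_URL}/public/dags/example_dag_id")
    if resp.status_code == 200:
        dag = resp.json()
        print(dag["dag_id"], dag["is_paused"], dag["owners"])
    else:
        # A 404 carries the "Dag with id ... was not found" detail from the view.
        print(resp.status_code, resp.json().get("detail"))
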
diff --git a/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow/ui/openapi-gen/requests/services.gen.ts index 7f61fd32f3493..24fbb9c29c46a 100644 --- a/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow/ui/openapi-gen/requests/services.gen.ts @@ -11,10 +11,12 @@ import type { GetDagsResponse, PatchDagsData, PatchDagsResponse, - GetDagDetailsData, - GetDagDetailsResponse, + GetDagData, + GetDagResponse, PatchDagData, PatchDagResponse, + GetDagDetailsData, + GetDagDetailsResponse, DeleteConnectionData, DeleteConnectionResponse, GetConnectionData, @@ -166,19 +168,17 @@ export class DagService { } /** - * Get Dag Details - * Get details of DAG. + * Get Dag + * Get basic information about a DAG. * @param data The data for the request. * @param data.dagId - * @returns DAGDetailsResponse Successful Response + * @returns DAGResponse Successful Response * @throws ApiError */ - public static getDagDetails( - data: GetDagDetailsData, - ): CancelablePromise { + public static getDag(data: GetDagData): CancelablePromise { return __request(OpenAPI, { method: "GET", - url: "/public/dags/{dag_id}/details", + url: "/public/dags/{dag_id}", path: { dag_id: data.dagId, }, @@ -225,6 +225,33 @@ export class DagService { }, }); } + + /** + * Get Dag Details + * Get details of DAG. + * @param data The data for the request. + * @param data.dagId + * @returns DAGDetailsResponse Successful Response + * @throws ApiError + */ + public static getDagDetails( + data: GetDagDetailsData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/dags/{dag_id}/details", + path: { + dag_id: data.dagId, + }, + errors: { + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Unprocessable Entity", + }, + }); + } } export class ConnectionService { diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index 7b5fc54065a01..368c981b9da1e 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -260,11 +260,11 @@ export type PatchDagsData = { export type PatchDagsResponse = DAGCollectionResponse; -export type GetDagDetailsData = { +export type GetDagData = { dagId: string; }; -export type GetDagDetailsResponse = DAGDetailsResponse; +export type GetDagResponse = DAGResponse; export type PatchDagData = { dagId: string; @@ -274,6 +274,12 @@ export type PatchDagData = { export type PatchDagResponse = DAGResponse; +export type GetDagDetailsData = { + dagId: string; +}; + +export type GetDagDetailsResponse = DAGDetailsResponse; + export type DeleteConnectionData = { connectionId: string; }; @@ -379,14 +385,14 @@ export type $OpenApiTs = { }; }; }; - "/public/dags/{dag_id}/details": { + "/public/dags/{dag_id}": { get: { - req: GetDagDetailsData; + req: GetDagData; res: { /** * Successful Response */ - 200: DAGDetailsResponse; + 200: DAGResponse; /** * Bad Request */ @@ -409,8 +415,6 @@ export type $OpenApiTs = { 422: HTTPExceptionResponse; }; }; - }; - "/public/dags/{dag_id}": { patch: { req: PatchDagData; res: { @@ -441,6 +445,37 @@ export type $OpenApiTs = { }; }; }; + "/public/dags/{dag_id}/details": { + get: { + req: GetDagDetailsData; + res: { + /** + * Successful Response + */ + 200: DAGDetailsResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * 
Unprocessable Entity + */ + 422: HTTPExceptionResponse; + }; + }; + }; "/public/connections/{connection_id}": { delete: { req: DeleteConnectionData; diff --git a/tests/api_fastapi/views/public/test_dags.py b/tests/api_fastapi/views/public/test_dags.py index 7ac93a2f2e07d..5512f7bb13849 100644 --- a/tests/api_fastapi/views/public/test_dags.py +++ b/tests/api_fastapi/views/public/test_dags.py @@ -303,3 +303,51 @@ def test_dag_details(test_client, query_params, dag_id, expected_status_code, da "timezone": UTC_JSON_REPR, } assert res_json == expected + + +@pytest.mark.parametrize( + "query_params, dag_id, expected_status_code, dag_display_name", + [ + ({}, "fake_dag_id", 404, "fake_dag"), + ({}, DAG2_ID, 200, DAG2_DISPLAY_NAME), + ], +) +def test_get_dag(test_client, query_params, dag_id, expected_status_code, dag_display_name): + response = test_client.get(f"/public/dags/{dag_id}", params=query_params) + assert response.status_code == expected_status_code + if expected_status_code != 200: + return + + # Match expected and actual responses below. + res_json = response.json() + last_parsed_time = res_json["last_parsed_time"] + file_token = res_json["file_token"] + expected = { + "dag_id": dag_id, + "dag_display_name": dag_display_name, + "description": None, + "fileloc": "/opt/airflow/tests/api_fastapi/views/public/test_dags.py", + "file_token": file_token, + "is_paused": False, + "is_active": True, + "owners": ["airflow"], + "timetable_summary": None, + "tags": [], + "next_dagrun": None, + "has_task_concurrency_limits": True, + "next_dagrun_data_interval_start": None, + "next_dagrun_data_interval_end": None, + "max_active_runs": 16, + "max_consecutive_failed_dag_runs": 0, + "next_dagrun_create_after": None, + "last_expired": None, + "max_active_tasks": 16, + "last_pickled": None, + "default_view": "grid", + "last_parsed_time": last_parsed_time, + "scheduler_lock": None, + "timetable_description": "Never, external triggers only", + "has_import_errors": False, + "pickle_id": None, + } + assert res_json == expected From efcb554fbc3e3150e3cf9e11d80148ce57cf63be Mon Sep 17 00:00:00 2001 From: Kalyan R Date: Thu, 10 Oct 2024 13:33:45 +0530 Subject: [PATCH 057/125] AIP-84 Migrate GET Dag Run endpoint to FastAPI (#42725) * get dag_run init * add serializer * Merge branch 'main' of https://github.com/apache/airflow into kalyan/AIP-84/get_dag_run * add types * add test * working tests * add note to DagRunResponse * add note * add test to test non Null note * Update airflow/api_fastapi/views/public/dag_run.py Co-authored-by: Pierre Jeambrun * Update airflow/api_fastapi/views/public/dag_run.py Co-authored-by: Pierre Jeambrun * Merge branch 'main' of https://github.com/apache/airflow into kalyan/AIP-84/get_dag_run * add 404 test --------- Co-authored-by: Pierre Jeambrun --- .../endpoints/dag_run_endpoint.py | 2 + airflow/api_fastapi/openapi/v1-generated.yaml | 153 +++++++++++++++++ airflow/api_fastapi/serializers/dag_run.py | 44 +++++ airflow/api_fastapi/views/public/__init__.py | 2 + airflow/api_fastapi/views/public/dag_run.py | 44 +++++ airflow/ui/openapi-gen/queries/common.ts | 19 ++ airflow/ui/openapi-gen/queries/prefetch.ts | 23 +++ airflow/ui/openapi-gen/queries/queries.ts | 32 ++++ airflow/ui/openapi-gen/queries/suspense.ts | 32 ++++ .../ui/openapi-gen/requests/schemas.gen.ts | 162 ++++++++++++++++++ .../ui/openapi-gen/requests/services.gen.ts | 31 ++++ airflow/ui/openapi-gen/requests/types.gen.ts | 78 +++++++++ .../api_fastapi/views/public/test_dag_run.py | 137 +++++++++++++++ 13 files changed, 759 
insertions(+) create mode 100644 airflow/api_fastapi/serializers/dag_run.py create mode 100644 airflow/api_fastapi/views/public/dag_run.py create mode 100644 tests/api_fastapi/views/public/test_dag_run.py diff --git a/airflow/api_connexion/endpoints/dag_run_endpoint.py b/airflow/api_connexion/endpoints/dag_run_endpoint.py index 44891c0ef2c84..a862b7c969503 100644 --- a/airflow/api_connexion/endpoints/dag_run_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_run_endpoint.py @@ -63,6 +63,7 @@ from airflow.models import DagModel, DagRun from airflow.timetables.base import DataInterval from airflow.utils.airflow_flask_app import get_airflow_app +from airflow.utils.api_migration import mark_fastapi_migration_done from airflow.utils.db import get_query_count from airflow.utils.session import NEW_SESSION, provide_session from airflow.utils.state import DagRunState @@ -90,6 +91,7 @@ def delete_dag_run(*, dag_id: str, dag_run_id: str, session: Session = NEW_SESSI return NoContent, HTTPStatus.NO_CONTENT +@mark_fastapi_migration_done @security.requires_access_dag("GET", DagAccessEntity.RUN) @provide_session def get_dag_run( diff --git a/airflow/api_fastapi/openapi/v1-generated.yaml b/airflow/api_fastapi/openapi/v1-generated.yaml index fb19c1abd1c12..7debfbb1008af 100644 --- a/airflow/api_fastapi/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/openapi/v1-generated.yaml @@ -629,6 +629,56 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + /public/dags/{dag_id}/dagRuns/{dag_run_id}: + get: + tags: + - DagRun + summary: Get Dag Run + operationId: get_dag_run + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGRunResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' components: schemas: ConnectionResponse: @@ -1097,6 +1147,87 @@ components: - file_token title: DAGResponse description: DAG serializer for responses. 
+ DAGRunResponse: + properties: + run_id: + anyOf: + - type: string + - type: 'null' + title: Run Id + dag_id: + type: string + title: Dag Id + logical_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date + start_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date + end_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date + data_interval_start: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Data Interval Start + data_interval_end: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Data Interval End + last_scheduling_decision: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Last Scheduling Decision + run_type: + $ref: '#/components/schemas/DagRunType' + state: + $ref: '#/components/schemas/DagRunState' + external_trigger: + type: boolean + title: External Trigger + triggered_by: + $ref: '#/components/schemas/DagRunTriggeredByType' + conf: + type: object + title: Conf + note: + anyOf: + - type: string + - type: 'null' + title: Note + type: object + required: + - run_id + - dag_id + - logical_date + - start_date + - end_date + - data_interval_start + - data_interval_end + - last_scheduling_decision + - run_type + - state + - external_trigger + - triggered_by + - conf + - note + title: DAGRunResponse + description: DAG Run serializer for responses. DAGRunStates: properties: queued: @@ -1157,6 +1288,28 @@ components: so please ensure that their values always match the ones with the same name in TaskInstanceState.' + DagRunTriggeredByType: + type: string + enum: + - cli + - operator + - rest_api + - ui + - test + - timetable + - dataset + - backfill + title: DagRunTriggeredByType + description: Class with TriggeredBy types for DagRun. + DagRunType: + type: string + enum: + - backfill + - scheduled + - manual + - dataset_triggered + title: DagRunType + description: Class with DagRun types. DagTagPydantic: properties: name: diff --git a/airflow/api_fastapi/serializers/dag_run.py b/airflow/api_fastapi/serializers/dag_run.py new file mode 100644 index 0000000000000..4622fac645c07 --- /dev/null +++ b/airflow/api_fastapi/serializers/dag_run.py @@ -0,0 +1,44 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +from __future__ import annotations + +from datetime import datetime + +from pydantic import BaseModel, Field + +from airflow.utils.state import DagRunState +from airflow.utils.types import DagRunTriggeredByType, DagRunType + + +class DAGRunResponse(BaseModel): + """DAG Run serializer for responses.""" + + dag_run_id: str | None = Field(alias="run_id") + dag_id: str + logical_date: datetime | None + start_date: datetime | None + end_date: datetime | None + data_interval_start: datetime | None + data_interval_end: datetime | None + last_scheduling_decision: datetime | None + run_type: DagRunType + state: DagRunState + external_trigger: bool + triggered_by: DagRunTriggeredByType + conf: dict + note: str | None diff --git a/airflow/api_fastapi/views/public/__init__.py b/airflow/api_fastapi/views/public/__init__.py index 4e02d9ab43bcf..9d90a0966802c 100644 --- a/airflow/api_fastapi/views/public/__init__.py +++ b/airflow/api_fastapi/views/public/__init__.py @@ -18,6 +18,7 @@ from __future__ import annotations from airflow.api_fastapi.views.public.connections import connections_router +from airflow.api_fastapi.views.public.dag_run import dag_run_router from airflow.api_fastapi.views.public.dags import dags_router from airflow.api_fastapi.views.public.variables import variables_router from airflow.api_fastapi.views.router import AirflowRouter @@ -28,3 +29,4 @@ public_router.include_router(dags_router) public_router.include_router(connections_router) public_router.include_router(variables_router) +public_router.include_router(dag_run_router) diff --git a/airflow/api_fastapi/views/public/dag_run.py b/airflow/api_fastapi/views/public/dag_run.py new file mode 100644 index 0000000000000..d39fb6f2f331c --- /dev/null +++ b/airflow/api_fastapi/views/public/dag_run.py @@ -0,0 +1,44 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
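
The serializer above validates straight off the ORM row, with Field(alias="run_id") exposing the column under its database name. A short sketch of that mapping, assuming an active SQLAlchemy session and placeholder ids:

    from sqlalchemy import select

    from airflow.api_fastapi.serializers.dag_run import DAGRunResponse
    from airflow.models import DagRun

    # `session` is assumed to be an open SQLAlchemy session; ids are placeholders.
    dag_run = session.scalar(select(DagRun).filter_by(dag_id="test_dag1", run_id="dag_run_1"))
    response = DAGRunResponse.model_validate(dag_run, from_attributes=True)
    # FastAPI serializes by alias, so the JSON key is "run_id" even though
    # the Python attribute is dag_run_id.
    print(response.dag_run_id, response.state, response.run_type)
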
+ +from __future__ import annotations + +from fastapi import Depends, HTTPException +from sqlalchemy import select +from sqlalchemy.orm import Session +from typing_extensions import Annotated + +from airflow.api_fastapi.db.common import get_session +from airflow.api_fastapi.openapi.exceptions import create_openapi_http_exception_doc +from airflow.api_fastapi.serializers.dag_run import DAGRunResponse +from airflow.api_fastapi.views.router import AirflowRouter +from airflow.models import DagRun + +dag_run_router = AirflowRouter(tags=["DagRun"], prefix="/dags/{dag_id}/dagRuns") + + +@dag_run_router.get("/{dag_run_id}", responses=create_openapi_http_exception_doc([401, 403, 404])) +async def get_dag_run( + dag_id: str, dag_run_id: str, session: Annotated[Session, Depends(get_session)] +) -> DAGRunResponse: + dag_run = session.scalar(select(DagRun).filter_by(dag_id=dag_id, run_id=dag_run_id)) + if dag_run is None: + raise HTTPException( + 404, f"The DagRun with dag_id: `{dag_id}` and run_id: `{dag_run_id}` was not found" + ) + + return DAGRunResponse.model_validate(dag_run, from_attributes=True) diff --git a/airflow/ui/openapi-gen/queries/common.ts b/airflow/ui/openapi-gen/queries/common.ts index a4d65c69003f1..aaff196c0791d 100644 --- a/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow/ui/openapi-gen/queries/common.ts @@ -4,6 +4,7 @@ import { UseQueryResult } from "@tanstack/react-query"; import { AssetService, ConnectionService, + DagRunService, DagService, DashboardService, VariableService, @@ -166,6 +167,24 @@ export const UseVariableServiceGetVariableKeyFn = ( }, queryKey?: Array, ) => [useVariableServiceGetVariableKey, ...(queryKey ?? [{ variableKey }])]; +export type DagRunServiceGetDagRunDefaultResponse = Awaited< + ReturnType +>; +export type DagRunServiceGetDagRunQueryResult< + TData = DagRunServiceGetDagRunDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useDagRunServiceGetDagRunKey = "DagRunServiceGetDagRun"; +export const UseDagRunServiceGetDagRunKeyFn = ( + { + dagId, + dagRunId, + }: { + dagId: string; + dagRunId: string; + }, + queryKey?: Array, +) => [useDagRunServiceGetDagRunKey, ...(queryKey ?? [{ dagId, dagRunId }])]; export type DagServicePatchDagsMutationResult = Awaited< ReturnType >; diff --git a/airflow/ui/openapi-gen/queries/prefetch.ts b/airflow/ui/openapi-gen/queries/prefetch.ts index 8bd691ca33be4..3e194302f4be0 100644 --- a/airflow/ui/openapi-gen/queries/prefetch.ts +++ b/airflow/ui/openapi-gen/queries/prefetch.ts @@ -4,6 +4,7 @@ import { type QueryClient } from "@tanstack/react-query"; import { AssetService, ConnectionService, + DagRunService, DagService, DashboardService, VariableService, @@ -206,3 +207,25 @@ export const prefetchUseVariableServiceGetVariable = ( queryKey: Common.UseVariableServiceGetVariableKeyFn({ variableKey }), queryFn: () => VariableService.getVariable({ variableKey }), }); +/** + * Get Dag Run + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagRunServiceGetDagRun = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + }: { + dagId: string; + dagRunId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }), + queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }), + }); diff --git a/airflow/ui/openapi-gen/queries/queries.ts b/airflow/ui/openapi-gen/queries/queries.ts index 51b8f4fb051d7..19bb17b342a84 100644 --- a/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow/ui/openapi-gen/queries/queries.ts @@ -9,6 +9,7 @@ import { import { AssetService, ConnectionService, + DagRunService, DagService, DashboardService, VariableService, @@ -263,6 +264,37 @@ export const useVariableServiceGetVariable = < queryFn: () => VariableService.getVariable({ variableKey }) as TData, ...options, }); +/** + * Get Dag Run + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ +export const useDagRunServiceGetDagRun = < + TData = Common.DagRunServiceGetDagRunDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + }: { + dagId: string; + dagRunId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagRunServiceGetDagRunKeyFn( + { dagId, dagRunId }, + queryKey, + ), + queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) as TData, + ...options, + }); /** * Patch Dags * Patch multiple DAGs. diff --git a/airflow/ui/openapi-gen/queries/suspense.ts b/airflow/ui/openapi-gen/queries/suspense.ts index b437007468f20..79ad479f0a42f 100644 --- a/airflow/ui/openapi-gen/queries/suspense.ts +++ b/airflow/ui/openapi-gen/queries/suspense.ts @@ -4,6 +4,7 @@ import { UseQueryOptions, useSuspenseQuery } from "@tanstack/react-query"; import { AssetService, ConnectionService, + DagRunService, DagService, DashboardService, VariableService, @@ -258,3 +259,34 @@ export const useVariableServiceGetVariableSuspense = < queryFn: () => VariableService.getVariable({ variableKey }) as TData, ...options, }); +/** + * Get Dag Run + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ +export const useDagRunServiceGetDagRunSuspense = < + TData = Common.DagRunServiceGetDagRunDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + }: { + dagId: string; + dagRunId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagRunServiceGetDagRunKeyFn( + { dagId, dagRunId }, + queryKey, + ), + queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) as TData, + ...options, + }); diff --git a/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow/ui/openapi-gen/requests/schemas.gen.ts index 8f76ebd13c40f..18df5284651b7 100644 --- a/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -784,6 +784,145 @@ export const $DAGResponse = { description: "DAG serializer for responses.", } as const; +export const $DAGRunResponse = { + properties: { + run_id: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Run Id", + }, + dag_id: { + type: "string", + title: "Dag Id", + }, + logical_date: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Logical Date", + }, + start_date: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Start Date", + }, + end_date: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "End Date", + }, + data_interval_start: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Data Interval Start", + }, + data_interval_end: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Data Interval End", + }, + last_scheduling_decision: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Last Scheduling Decision", + }, + run_type: { + $ref: "#/components/schemas/DagRunType", + }, + state: { + $ref: "#/components/schemas/DagRunState", + }, + external_trigger: { + type: "boolean", + title: "External Trigger", + }, + triggered_by: { + $ref: "#/components/schemas/DagRunTriggeredByType", + }, + conf: { + type: "object", + title: "Conf", + }, + note: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Note", + }, + }, + type: "object", + required: [ + "run_id", + "dag_id", + "logical_date", + "start_date", + "end_date", + "data_interval_start", + "data_interval_end", + "last_scheduling_decision", + "run_type", + "state", + "external_trigger", + "triggered_by", + "conf", + "note", + ], + title: "DAGRunResponse", + description: "DAG Run serializer for responses.", +} as const; + export const $DAGRunStates = { properties: { queued: { @@ -845,6 +984,29 @@ so please ensure that their values always match the ones with the same name in TaskInstanceState.`, } as const; +export const $DagRunTriggeredByType = { + type: "string", + enum: [ + "cli", + "operator", + "rest_api", + "ui", + "test", + "timetable", + "dataset", + "backfill", + ], + title: "DagRunTriggeredByType", + description: "Class with TriggeredBy types for DagRun.", +} as const; + +export const $DagRunType = { + type: "string", + enum: ["backfill", "scheduled", "manual", "dataset_triggered"], + title: "DagRunType", + description: "Class with DagRun types.", +} as const; + export 
const $DagTagPydantic = { properties: { name: { diff --git a/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow/ui/openapi-gen/requests/services.gen.ts index 24fbb9c29c46a..9a126aef25fbc 100644 --- a/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow/ui/openapi-gen/requests/services.gen.ts @@ -25,6 +25,8 @@ import type { DeleteVariableResponse, GetVariableData, GetVariableResponse, + GetDagRunData, + GetDagRunResponse, } from "./types.gen"; export class AssetService { @@ -361,3 +363,32 @@ export class VariableService { }); } } + +export class DagRunService { + /** + * Get Dag Run + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ + public static getDagRun( + data: GetDagRunData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}", + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } +} diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index 368c981b9da1e..45bfa51aec9c4 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -120,6 +120,28 @@ export type DAGResponse = { readonly file_token: string; }; +/** + * DAG Run serializer for responses. + */ +export type DAGRunResponse = { + run_id: string | null; + dag_id: string; + logical_date: string | null; + start_date: string | null; + end_date: string | null; + data_interval_start: string | null; + data_interval_end: string | null; + last_scheduling_decision: string | null; + run_type: DagRunType; + state: DagRunState; + external_trigger: boolean; + triggered_by: DagRunTriggeredByType; + conf: { + [key: string]: unknown; + }; + note: string | null; +}; + /** * DAG Run States for responses. */ @@ -149,6 +171,28 @@ export type DAGRunTypes = { */ export type DagRunState = "queued" | "running" | "success" | "failed"; +/** + * Class with TriggeredBy types for DagRun. + */ +export type DagRunTriggeredByType = + | "cli" + | "operator" + | "rest_api" + | "ui" + | "test" + | "timetable" + | "dataset" + | "backfill"; + +/** + * Class with DagRun types. + */ +export type DagRunType = + | "backfill" + | "scheduled" + | "manual" + | "dataset_triggered"; + /** * Serializable representation of the DagTag ORM SqlAlchemyModel used by internal API. 
*/ @@ -304,6 +348,13 @@ export type GetVariableData = { export type GetVariableResponse = VariableResponse; +export type GetDagRunData = { + dagId: string; + dagRunId: string; +}; + +export type GetDagRunResponse = DAGRunResponse; + export type $OpenApiTs = { "/ui/next_run_assets/{dag_id}": { get: { @@ -580,4 +631,31 @@ export type $OpenApiTs = { }; }; }; + "/public/dags/{dag_id}/dagRuns/{dag_run_id}": { + get: { + req: GetDagRunData; + res: { + /** + * Successful Response + */ + 200: DAGRunResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; }; diff --git a/tests/api_fastapi/views/public/test_dag_run.py b/tests/api_fastapi/views/public/test_dag_run.py new file mode 100644 index 0000000000000..dab81907068e7 --- /dev/null +++ b/tests/api_fastapi/views/public/test_dag_run.py @@ -0,0 +1,137 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
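
Functionally the route matches the Connexion endpoint it supersedes (the old handler is now marked with @mark_fastapi_migration_done). A rough client-side sketch, assuming a local API server; the server URL and the ids reused from the tests below are placeholders:

    import requests

    # Placeholder server and identifiers; substitute real values.
    url = "http://localhost:8080/public/dags/test_dag1/dagRuns/dag_run_1"
    resp = requests.get(url)

    # On a miss the view raises HTTPException(404) with the detail
    # "The DagRun with dag_id: `...` and run_id: `...` was not found".
    print(resp.status_code, resp.json())
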
+ +from __future__ import annotations + +from datetime import datetime, timezone + +import pytest + +from airflow.operators.empty import EmptyOperator +from airflow.utils.session import provide_session +from airflow.utils.state import DagRunState +from airflow.utils.types import DagRunTriggeredByType, DagRunType +from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags + +pytestmark = pytest.mark.db_test + +DAG1_ID = "test_dag1" +DAG2_ID = "test_dag2" +DAG1_RUN1_ID = "dag_run_1" +DAG1_RUN2_ID = "dag_run_2" +DAG2_RUN1_ID = "dag_run_3" +DAG2_RUN2_ID = "dag_run_4" +DAG1_RUN1_STATE = DagRunState.SUCCESS +DAG1_RUN2_STATE = DagRunState.FAILED +DAG2_RUN1_STATE = DagRunState.SUCCESS +DAG2_RUN2_STATE = DagRunState.SUCCESS +DAG1_RUN1_RUN_TYPE = DagRunType.MANUAL +DAG1_RUN2_RUN_TYPE = DagRunType.SCHEDULED +DAG2_RUN1_RUN_TYPE = DagRunType.BACKFILL_JOB +DAG2_RUN2_RUN_TYPE = DagRunType.DATASET_TRIGGERED +DAG1_RUN1_TRIGGERED_BY = DagRunTriggeredByType.UI +DAG1_RUN2_TRIGGERED_BY = DagRunTriggeredByType.DATASET +DAG2_RUN1_TRIGGERED_BY = DagRunTriggeredByType.CLI +DAG2_RUN2_TRIGGERED_BY = DagRunTriggeredByType.REST_API +START_DATE = datetime(2024, 6, 15, 0, 0, tzinfo=timezone.utc) +EXECUTION_DATE = datetime(2024, 6, 16, 0, 0, tzinfo=timezone.utc) +DAG1_NOTE = "test_note" + + +@pytest.fixture(autouse=True) +@provide_session +def setup(dag_maker, session=None): + clear_db_runs() + clear_db_dags() + clear_db_serialized_dags() + + with dag_maker( + DAG1_ID, + schedule="@daily", + start_date=START_DATE, + ): + EmptyOperator(task_id="task_1") + dag1 = dag_maker.create_dagrun( + run_id=DAG1_RUN1_ID, + state=DAG1_RUN1_STATE, + run_type=DAG1_RUN1_RUN_TYPE, + triggered_by=DAG1_RUN1_TRIGGERED_BY, + ) + dag1.note = (DAG1_NOTE, 1) + + dag_maker.create_dagrun( + run_id=DAG1_RUN2_ID, + state=DAG1_RUN2_STATE, + run_type=DAG1_RUN2_RUN_TYPE, + triggered_by=DAG1_RUN2_TRIGGERED_BY, + execution_date=EXECUTION_DATE, + ) + + with dag_maker( + DAG2_ID, + schedule=None, + start_date=START_DATE, + ): + EmptyOperator(task_id="task_2") + dag_maker.create_dagrun( + run_id=DAG2_RUN1_ID, + state=DAG2_RUN1_STATE, + run_type=DAG2_RUN1_RUN_TYPE, + triggered_by=DAG2_RUN1_TRIGGERED_BY, + execution_date=EXECUTION_DATE, + ) + dag_maker.create_dagrun( + run_id=DAG2_RUN2_ID, + state=DAG2_RUN2_STATE, + run_type=DAG2_RUN2_RUN_TYPE, + triggered_by=DAG2_RUN2_TRIGGERED_BY, + execution_date=EXECUTION_DATE, + ) + + dag_maker.dagbag.sync_to_db() + dag_maker.dag_model + dag_maker.dag_model.has_task_concurrency_limits = True + session.merge(dag_maker.dag_model) + session.commit() + + +@pytest.mark.parametrize( + "dag_id, run_id, state, run_type, triggered_by, dag_run_note", + [ + (DAG1_ID, DAG1_RUN1_ID, DAG1_RUN1_STATE, DAG1_RUN1_RUN_TYPE, DAG1_RUN1_TRIGGERED_BY, DAG1_NOTE), + (DAG1_ID, DAG1_RUN2_ID, DAG1_RUN2_STATE, DAG1_RUN2_RUN_TYPE, DAG1_RUN2_TRIGGERED_BY, None), + (DAG2_ID, DAG2_RUN1_ID, DAG2_RUN1_STATE, DAG2_RUN1_RUN_TYPE, DAG2_RUN1_TRIGGERED_BY, None), + (DAG2_ID, DAG2_RUN2_ID, DAG2_RUN2_STATE, DAG2_RUN2_RUN_TYPE, DAG2_RUN2_TRIGGERED_BY, None), + ], +) +def test_get_dag_run(test_client, dag_id, run_id, state, run_type, triggered_by, dag_run_note): + response = test_client.get(f"/public/dags/{dag_id}/dagRuns/{run_id}") + assert response.status_code == 200 + body = response.json() + assert body["dag_id"] == dag_id + assert body["run_id"] == run_id + assert body["state"] == state + assert body["run_type"] == run_type + assert body["triggered_by"] == triggered_by.value + assert body["note"] == dag_run_note + + +def 
test_get_dag_run_not_found(test_client): + response = test_client.get(f"/public/dags/{DAG1_ID}/dagRuns/invalid") + assert response.status_code == 404 + body = response.json() + assert body["detail"] == "The DagRun with dag_id: `test_dag1` and run_id: `invalid` was not found" From 14bbbb69caabd462d97f6bc3d17b2d5dbe4228c8 Mon Sep 17 00:00:00 2001 From: majorosdonat Date: Thu, 10 Oct 2024 11:34:15 +0200 Subject: [PATCH 058/125] add majorosdonat as bosch user (#42890) Co-authored-by: Majoros Donat (XC-DX/EET2-Bp) --- INTHEWILD.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/INTHEWILD.md b/INTHEWILD.md index c65597acfb60f..310d018b98329 100644 --- a/INTHEWILD.md +++ b/INTHEWILD.md @@ -101,7 +101,7 @@ Currently, **officially** using Airflow: 1. [Bombora Inc](https://bombora.com/) [[@jeffkpayne](https://github.com/jeffkpayne), [@pakelley](https://github.com/pakelley), [@dNavalta](https://github.com/dNavalta), [@austynh](https://github.com/austynh), [@TheOriginalAlex](https://github.com/TheOriginalAlex)] 1. [Bonial International GmbH](https://www.bonial.com/) 1. [Bonnier Broadcasting](http://www.bonnierbroadcasting.com) [[@wileeam](https://github.com/wileeam)] -1. [Bosch (Robert Bosch GmbH)](https://www.bosch.com/stories/topics/automated-driving/) [[@jscheffl](https://github.com/jscheffl), [@clellmann](https://github.com/clellmann), [@wolfdn](https://github.com/wolfdn), [@AutomationDev85](https://github.com/AutomationDev85)] +1. [Bosch (Robert Bosch GmbH)](https://www.bosch.com/stories/topics/automated-driving/) [[@jscheffl](https://github.com/jscheffl), [@clellmann](https://github.com/clellmann), [@wolfdn](https://github.com/wolfdn), [@AutomationDev85](https://github.com/AutomationDev85), [@majorosdonat](https://github.com/majorosdonat)] 1. [BounceX](http://www.bouncex.com) [[@JoshFerge](https://github.com/JoshFerge), [@hudsonrio](https://github.com/hudsonrio), [@ronniekritou](https://github.com/ronniekritou)] 1. [Braintree](https://www.braintreepayments.com) [[@coopergillan](https://github.com/coopergillan), [@curiousjazz77](https://github.com/curiousjazz77), [@raymondberg](https://github.com/raymondberg)] 1. [Branch](https://branch.io) [[@sdebarshi](https://github.com/sdebarshi), [@dmitrig01](https://github.com/dmitrig01)] From 978bb0c6b6a753edae0ef9c45e613d5be2e01672 Mon Sep 17 00:00:00 2001 From: Vincent <97131062+vincbeck@users.noreply.github.com> Date: Thu, 10 Oct 2024 09:06:54 -0400 Subject: [PATCH 059/125] Move the session auth backend to FAB auth manager (#42878) --- .../auth_manager/api/auth/backend/session.py | 47 ++++++++++++ .../api/auth/backend/test_session.py | 73 +++++++++++++++++++ 2 files changed, 120 insertions(+) create mode 100644 providers/src/airflow/providers/fab/auth_manager/api/auth/backend/session.py create mode 100644 providers/tests/fab/auth_manager/api/auth/backend/test_session.py diff --git a/providers/src/airflow/providers/fab/auth_manager/api/auth/backend/session.py b/providers/src/airflow/providers/fab/auth_manager/api/auth/backend/session.py new file mode 100644 index 0000000000000..d51f7bf1cf4c9 --- /dev/null +++ b/providers/src/airflow/providers/fab/auth_manager/api/auth/backend/session.py @@ -0,0 +1,47 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""Session authentication backend.""" + +from __future__ import annotations + +from functools import wraps +from typing import Any, Callable, TypeVar, cast + +from flask import Response + +from airflow.www.extensions.init_auth_manager import get_auth_manager + +CLIENT_AUTH: tuple[str, str] | Any | None = None + + +def init_app(_): + """Initialize authentication backend.""" + + +T = TypeVar("T", bound=Callable) + + +def requires_authentication(function: T): + """Decorate functions that require authentication.""" + + @wraps(function) + def decorated(*args, **kwargs): + if not get_auth_manager().is_logged_in(): + return Response("Unauthorized", 401, {}) + return function(*args, **kwargs) + + return cast(T, decorated) diff --git a/providers/tests/fab/auth_manager/api/auth/backend/test_session.py b/providers/tests/fab/auth_manager/api/auth/backend/test_session.py new file mode 100644 index 0000000000000..513e8ec2aa293 --- /dev/null +++ b/providers/tests/fab/auth_manager/api/auth/backend/test_session.py @@ -0,0 +1,73 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
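
The moved decorator is a thin wrapper over the auth manager's is_logged_in check, returning a bare 401 Response when the check fails. A hedged sketch of opting a Flask view into it; the handler name and payload are illustrative only:

    from flask import jsonify

    from airflow.providers.fab.auth_manager.api.auth.backend.session import (
        requires_authentication,
    )

    # Hypothetical handler; any Flask-routed callable works the same way.
    @requires_authentication
    def get_protected_resource():
        # Reached only when get_auth_manager().is_logged_in() is True;
        # otherwise the decorator short-circuits with a 401 Response.
        return jsonify({"status": "ok"})
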
+from __future__ import annotations + +from unittest.mock import Mock, patch + +import pytest +from flask import Response + +from airflow.providers.fab.auth_manager.api.auth.backend.session import requires_authentication +from airflow.www import app as application + +from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS + +pytestmark = [ + pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.9.0+ only"), +] + + +@pytest.fixture +def app(): + return application.create_app(testing=True) + + +mock_call = Mock() + + +@requires_authentication +def function_decorated(): + mock_call() + + +@pytest.mark.db_test +class TestSessionAuth: + def setup_method(self) -> None: + mock_call.reset_mock() + + @patch("airflow.providers.fab.auth_manager.api.auth.backend.session.get_auth_manager") + def test_requires_authentication_when_not_authenticated(self, mock_get_auth_manager, app): + auth_manager = Mock() + auth_manager.is_logged_in.return_value = False + mock_get_auth_manager.return_value = auth_manager + with app.test_request_context() as mock_context: + mock_context.request.authorization = None + result = function_decorated() + + assert type(result) is Response + assert result.status_code == 401 + + @patch("airflow.providers.fab.auth_manager.api.auth.backend.session.get_auth_manager") + def test_requires_authentication_when_authenticated(self, mock_get_auth_manager, app): + auth_manager = Mock() + auth_manager.is_logged_in.return_value = True + mock_get_auth_manager.return_value = auth_manager + with app.test_request_context() as mock_context: + mock_context.request.authorization = None + function_decorated() + + mock_call.assert_called_once() From c7104f53b9fbb0795822745848824fe322acd2f7 Mon Sep 17 00:00:00 2001 From: Vincent <97131062+vincbeck@users.noreply.github.com> Date: Thu, 10 Oct 2024 09:08:42 -0400 Subject: [PATCH 060/125] Move user and roles schemas to fab provider (#42869) --- .../role_and_permission_endpoint.py | 6 +++--- .../auth_manager/api_endpoints/user_endpoint.py | 6 +++--- .../fab/auth_manager/schemas/__init__.py | 16 ++++++++++++++++ .../schemas/role_and_permission_schema.py | 0 .../fab/auth_manager}/schemas/user_schema.py | 2 +- .../tests/fab/auth_manager/schemas/__init__.py | 17 +++++++++++++++++ .../test_role_and_permission_schema.py | 2 +- .../test_user_schema.py | 5 ++++- 8 files changed, 45 insertions(+), 9 deletions(-) create mode 100644 providers/src/airflow/providers/fab/auth_manager/schemas/__init__.py rename {airflow/api_connexion => providers/src/airflow/providers/fab/auth_manager}/schemas/role_and_permission_schema.py (100%) rename {airflow/api_connexion => providers/src/airflow/providers/fab/auth_manager}/schemas/user_schema.py (96%) create mode 100644 providers/tests/fab/auth_manager/schemas/__init__.py rename providers/tests/fab/auth_manager/{api_endpoints => schemas}/test_role_and_permission_schema.py (97%) rename providers/tests/fab/auth_manager/{api_endpoints => schemas}/test_user_schema.py (97%) diff --git a/providers/src/airflow/providers/fab/auth_manager/api_endpoints/role_and_permission_endpoint.py b/providers/src/airflow/providers/fab/auth_manager/api_endpoints/role_and_permission_endpoint.py index ed42f91163982..121a88be28587 100644 --- a/providers/src/airflow/providers/fab/auth_manager/api_endpoints/role_and_permission_endpoint.py +++ b/providers/src/airflow/providers/fab/auth_manager/api_endpoints/role_and_permission_endpoint.py @@ -26,15 +26,15 @@ from airflow.api_connexion.exceptions import AlreadyExists, BadRequest, 
NotFound from airflow.api_connexion.parameters import check_limit, format_parameters -from airflow.api_connexion.schemas.role_and_permission_schema import ( +from airflow.api_connexion.security import requires_access_custom_view +from airflow.providers.fab.auth_manager.models import Action, Role +from airflow.providers.fab.auth_manager.schemas.role_and_permission_schema import ( ActionCollection, RoleCollection, action_collection_schema, role_collection_schema, role_schema, ) -from airflow.api_connexion.security import requires_access_custom_view -from airflow.providers.fab.auth_manager.models import Action, Role from airflow.providers.fab.auth_manager.security_manager.override import FabAirflowSecurityManagerOverride from airflow.security import permissions from airflow.www.extensions.init_auth_manager import get_auth_manager diff --git a/providers/src/airflow/providers/fab/auth_manager/api_endpoints/user_endpoint.py b/providers/src/airflow/providers/fab/auth_manager/api_endpoints/user_endpoint.py index 665b7f52d896f..43464a23d365e 100644 --- a/providers/src/airflow/providers/fab/auth_manager/api_endpoints/user_endpoint.py +++ b/providers/src/airflow/providers/fab/auth_manager/api_endpoints/user_endpoint.py @@ -27,14 +27,14 @@ from airflow.api_connexion.exceptions import AlreadyExists, BadRequest, NotFound, Unknown from airflow.api_connexion.parameters import check_limit, format_parameters -from airflow.api_connexion.schemas.user_schema import ( +from airflow.api_connexion.security import requires_access_custom_view +from airflow.providers.fab.auth_manager.models import User +from airflow.providers.fab.auth_manager.schemas.user_schema import ( UserCollection, user_collection_item_schema, user_collection_schema, user_schema, ) -from airflow.api_connexion.security import requires_access_custom_view -from airflow.providers.fab.auth_manager.models import User from airflow.providers.fab.auth_manager.security_manager.override import FabAirflowSecurityManagerOverride from airflow.security import permissions from airflow.www.extensions.init_auth_manager import get_auth_manager diff --git a/providers/src/airflow/providers/fab/auth_manager/schemas/__init__.py b/providers/src/airflow/providers/fab/auth_manager/schemas/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/providers/src/airflow/providers/fab/auth_manager/schemas/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/airflow/api_connexion/schemas/role_and_permission_schema.py b/providers/src/airflow/providers/fab/auth_manager/schemas/role_and_permission_schema.py similarity index 100% rename from airflow/api_connexion/schemas/role_and_permission_schema.py rename to providers/src/airflow/providers/fab/auth_manager/schemas/role_and_permission_schema.py diff --git a/airflow/api_connexion/schemas/user_schema.py b/providers/src/airflow/providers/fab/auth_manager/schemas/user_schema.py similarity index 96% rename from airflow/api_connexion/schemas/user_schema.py rename to providers/src/airflow/providers/fab/auth_manager/schemas/user_schema.py index 167dd160d1baf..4155667d56766 100644 --- a/airflow/api_connexion/schemas/user_schema.py +++ b/providers/src/airflow/providers/fab/auth_manager/schemas/user_schema.py @@ -22,8 +22,8 @@ from marshmallow_sqlalchemy import SQLAlchemySchema, auto_field from airflow.api_connexion.parameters import validate_istimezone -from airflow.api_connexion.schemas.role_and_permission_schema import RoleSchema from airflow.providers.fab.auth_manager.models import User +from airflow.providers.fab.auth_manager.schemas.role_and_permission_schema import RoleSchema class UserCollectionItemSchema(SQLAlchemySchema): diff --git a/providers/tests/fab/auth_manager/schemas/__init__.py b/providers/tests/fab/auth_manager/schemas/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ b/providers/tests/fab/auth_manager/schemas/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
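For downstream consumers this change is purely an import-path move. A minimal sketch of the new imports (module paths are taken from the renames in this patch; the old core-Airflow paths stop resolving once the files are moved):

    # Old (Airflow core) locations, removed by this patch:
    #   from airflow.api_connexion.schemas.user_schema import user_schema
    #   from airflow.api_connexion.schemas.role_and_permission_schema import role_schema
    # New (FAB provider) locations:
    from airflow.providers.fab.auth_manager.schemas.role_and_permission_schema import role_schema
    from airflow.providers.fab.auth_manager.schemas.user_schema import user_schema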
diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_schema.py b/providers/tests/fab/auth_manager/schemas/test_role_and_permission_schema.py similarity index 97% rename from providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_schema.py rename to providers/tests/fab/auth_manager/schemas/test_role_and_permission_schema.py index f5ec73a3e9893..8e44c35546332 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_schema.py +++ b/providers/tests/fab/auth_manager/schemas/test_role_and_permission_schema.py @@ -18,7 +18,7 @@ import pytest -from airflow.api_connexion.schemas.role_and_permission_schema import ( +from airflow.providers.fab.auth_manager.schemas.role_and_permission_schema import ( RoleCollection, role_collection_schema, role_schema, diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_user_schema.py b/providers/tests/fab/auth_manager/schemas/test_user_schema.py similarity index 97% rename from providers/tests/fab/auth_manager/api_endpoints/test_user_schema.py rename to providers/tests/fab/auth_manager/schemas/test_user_schema.py index b37f27abe0397..648372d3baebd 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_user_schema.py +++ b/providers/tests/fab/auth_manager/schemas/test_user_schema.py @@ -24,8 +24,11 @@ from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_role, delete_role with ignore_provider_compatibility_error("2.9.0+", __file__): - from airflow.api_connexion.schemas.user_schema import user_collection_item_schema, user_schema from airflow.providers.fab.auth_manager.models import User + from airflow.providers.fab.auth_manager.schemas.user_schema import ( + user_collection_item_schema, + user_schema, + ) TEST_EMAIL = "test@example.org" From 2b541f3c1513e1682816ad3a56b4250f3259c1c6 Mon Sep 17 00:00:00 2001 From: Daniel Standish <15932138+dstandish@users.noreply.github.com> Date: Thu, 10 Oct 2024 08:16:01 -0700 Subject: [PATCH 061/125] Add support for run conf to backfill (#42865) This actually does a little bit more. It changes the backfill create endpoint to take a json payload instead of just query params. This is just easier because we can use the backfill schema as the schema of the request body. One thing that is maybe weird is I add a decorator to translate the request body to the kwargs in the endpoint function. The main motivator here was for compatibility with the requires_access_dag decorator, which doesn't check request body. 
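For reference, a minimal sketch of a call against the reworked endpoint, with the parameters moved into a JSON request body (the field names follow the backfill schema changed below; the base URL and credentials are illustrative assumptions, not part of the patch):

    import requests

    # The backfill parameters now travel as a JSON body rather than query params.
    resp = requests.post(
        "http://localhost:8080/api/v1/backfills",
        auth=("admin", "admin"),  # assumed credentials, for illustration only
        json={
            "dag_id": "TEST_DAG_1",
            "from_date": "2024-01-01T00:00:00+00:00",
            "to_date": "2024-02-01T00:00:00+00:00",
            "max_active_runs": 5,
            "reverse": False,
            "dag_run_conf": {"param1": "val1"},
        },
    )
    resp.raise_for_status()
    print(resp.json()["id"])  # id of the created backfill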
--- .../endpoints/backfill_endpoint.py | 39 +++++++++--- airflow/api_connexion/openapi/v1.yaml | 59 +++---------------- .../api_connexion/schemas/backfill_schema.py | 17 +++--- airflow/models/backfill.py | 7 ++- .../api_endpoints/test_backfill_endpoint.py | 20 +++---- .../endpoints/test_backfill_endpoint.py | 23 ++++---- tests/models/test_backfill.py | 4 +- 7 files changed, 74 insertions(+), 95 deletions(-) diff --git a/airflow/api_connexion/endpoints/backfill_endpoint.py b/airflow/api_connexion/endpoints/backfill_endpoint.py index a0e728c5bc464..94d6ad21f05f1 100644 --- a/airflow/api_connexion/endpoints/backfill_endpoint.py +++ b/airflow/api_connexion/endpoints/backfill_endpoint.py @@ -19,14 +19,14 @@ import logging from functools import wraps -from typing import TYPE_CHECKING, cast +from typing import TYPE_CHECKING -import pendulum -from pendulum import DateTime +from flask import request +from marshmallow import ValidationError from sqlalchemy import select from airflow.api_connexion import security -from airflow.api_connexion.exceptions import Conflict, NotFound +from airflow.api_connexion.exceptions import BadRequest, Conflict, NotFound from airflow.api_connexion.schemas.backfill_schema import ( BackfillCollection, backfill_collection_schema, @@ -42,6 +42,8 @@ from airflow.www.decorators import action_logging if TYPE_CHECKING: + from datetime import datetime + from sqlalchemy.orm import Session from airflow.api_connexion.types import APIResponse @@ -119,12 +121,33 @@ def get_backfill(*, backfill_id: int, session: Session = NEW_SESSION, **kwargs): raise NotFound("Backfill not found") +def backfill_obj_to_kwargs(f): + """ + Convert the request body (containing backfill object json) to kwargs. + + The main point here is to be compatible with the ``requires_access_dag`` decorator, + which takes dag_id kwarg and doesn't support json request body. + """ + + @wraps(f) + def inner(): + body = request.json + try: + obj = backfill_schema.load(body) + except ValidationError as err: + raise BadRequest(detail=str(err.messages)) + return f(**obj) + + return inner + + +@backfill_obj_to_kwargs @security.requires_access_dag("PUT") @action_logging def create_backfill( dag_id: str, - from_date: str, - to_date: str, + from_date: datetime, + to_date: datetime, max_active_runs: int = 10, reverse: bool = False, dag_run_conf: dict | None = None, @@ -132,8 +155,8 @@ def create_backfill( try: backfill_obj = _create_backfill( dag_id=dag_id, - from_date=cast(DateTime, pendulum.parse(from_date)), - to_date=cast(DateTime, pendulum.parse(to_date)), + from_date=from_date, + to_date=to_date, max_active_runs=max_active_runs, reverse=reverse, dag_run_conf=dag_run_conf, diff --git a/airflow/api_connexion/openapi/v1.yaml b/airflow/api_connexion/openapi/v1.yaml index 305653e781797..b39d1cd955dd9 100644 --- a/airflow/api_connexion/openapi/v1.yaml +++ b/airflow/api_connexion/openapi/v1.yaml @@ -277,57 +277,12 @@ paths: x-openapi-router-controller: airflow.api_connexion.endpoints.backfill_endpoint operationId: create_backfill tags: [Backfill] - parameters: - - name: dag_id - in: query - schema: - type: string - required: true - description: | - Create dag runs for this dag. - - - name: from_date - in: query - schema: - type: string - format: date-time - required: true - description: | - Create dag runs with logical dates from this date onward, including this date. 
- - - name: to_date - in: query - schema: - type: string - format: date-time - required: true - description: | - Create dag runs for logical dates up to but not including this date. - - - name: max_active_runs - in: query - schema: - type: integer - required: false - description: | - Maximum number of active DAG runs for the the backfill. - - - name: reverse - in: query - schema: - type: boolean - required: false - description: | - If true, run the dag runs in descending order of logical date. - - - name: config - in: query - schema: - # todo: AIP-78 make this object - type: string - required: false - description: | - If true, run the dag runs in descending order of logical date. + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/Backfill" responses: "200": description: Success. @@ -2920,7 +2875,7 @@ components: nullable: true description: To date of the backfill (exclusive). dag_run_conf: - type: string + type: object nullable: true description: Dag run conf to be forwarded to the dag runs. is_paused: diff --git a/airflow/api_connexion/schemas/backfill_schema.py b/airflow/api_connexion/schemas/backfill_schema.py index 7f83d76df6dfc..db496bf1ac5d5 100644 --- a/airflow/api_connexion/schemas/backfill_schema.py +++ b/airflow/api_connexion/schemas/backfill_schema.py @@ -34,15 +34,16 @@ class Meta: model = Backfill id = auto_field(dump_only=True) - dag_id = auto_field(dump_only=True) - from_date = auto_field(dump_only=True) - to_date = auto_field(dump_only=True) + dag_id = auto_field() + from_date = auto_field() + to_date = auto_field() dag_run_conf = fields.Dict(allow_none=True) - is_paused = auto_field(dump_only=True) - max_active_runs = auto_field(dump_only=True) - created_at = auto_field(dump_only=True) - completed_at = auto_field(dump_only=True) - updated_at = auto_field(dump_only=True) + reverse = fields.Boolean() + is_paused = auto_field() + max_active_runs = auto_field() + created_at = auto_field() + completed_at = auto_field() + updated_at = auto_field() class BackfillDagRunSchema(SQLAlchemySchema): diff --git a/airflow/models/backfill.py b/airflow/models/backfill.py index 37683ee6f1e55..aa9cb695b7579 100644 --- a/airflow/models/backfill.py +++ b/airflow/models/backfill.py @@ -41,7 +41,8 @@ from airflow.utils.types import DagRunTriggeredByType, DagRunType if TYPE_CHECKING: - from pendulum import DateTime + from datetime import datetime + log = logging.getLogger(__name__) @@ -121,8 +122,8 @@ def validate_sort_ordinal(self, key, val): def _create_backfill( *, dag_id: str, - from_date: DateTime, - to_date: DateTime, + from_date: datetime, + to_date: datetime, max_active_runs: int, reverse: bool, dag_run_conf: dict | None, diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_backfill_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_backfill_endpoint.py index 9d9a79af51135..f8015ff907ba6 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_backfill_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_backfill_endpoint.py @@ -19,7 +19,6 @@ import os from datetime import datetime from unittest import mock -from urllib.parse import urlencode import pendulum import pytest @@ -197,21 +196,20 @@ def test_create_backfill(self, session, dag_maker): to_date = pendulum.parse("2024-02-01") to_date_iso = to_date.isoformat() max_active_runs = 5 - query = urlencode( - query={ - "dag_id": dag.dag_id, - "from_date": f"{from_date_iso}", - "to_date": f"{to_date_iso}", - "max_active_runs": 
max_active_runs, - "reverse": False, - } - ) + data = { + "dag_id": dag.dag_id, + "from_date": f"{from_date_iso}", + "to_date": f"{to_date_iso}", + "max_active_runs": max_active_runs, + "reverse": False, + } kwargs = {} kwargs.update(environ_overrides={"REMOTE_USER": "test_granular_permissions"}) response = self.client.post( - f"/api/v1/backfills?{query}", + "/api/v1/backfills", **kwargs, + json=data, ) assert response.status_code == 200 assert response.json == { diff --git a/tests/api_connexion/endpoints/test_backfill_endpoint.py b/tests/api_connexion/endpoints/test_backfill_endpoint.py index 67ec6316e2ffd..5bb959cabaf56 100644 --- a/tests/api_connexion/endpoints/test_backfill_endpoint.py +++ b/tests/api_connexion/endpoints/test_backfill_endpoint.py @@ -19,7 +19,6 @@ import os from datetime import datetime from unittest import mock -from urllib.parse import urlencode import pendulum import pytest @@ -272,21 +271,21 @@ def test_create_backfill(self, user, expected, session, dag_maker): to_date = pendulum.parse("2024-02-01") to_date_iso = to_date.isoformat() max_active_runs = 5 - query = urlencode( - query={ - "dag_id": dag.dag_id, - "from_date": f"{from_date_iso}", - "to_date": f"{to_date_iso}", - "max_active_runs": max_active_runs, - "reverse": False, - } - ) + data = { + "dag_id": dag.dag_id, + "from_date": f"{from_date_iso}", + "to_date": f"{to_date_iso}", + "max_active_runs": max_active_runs, + "reverse": False, + "dag_run_conf": {"param1": "val1", "param2": True}, + } kwargs = {} if user: kwargs.update(environ_overrides={"REMOTE_USER": user}) response = self.client.post( - f"/api/v1/backfills?{query}", + "/api/v1/backfills", + json=data, **kwargs, ) assert response.status_code == expected @@ -295,7 +294,7 @@ def test_create_backfill(self, user, expected, session, dag_maker): "completed_at": mock.ANY, "created_at": mock.ANY, "dag_id": "TEST_DAG_1", - "dag_run_conf": None, + "dag_run_conf": {"param1": "val1", "param2": True}, "from_date": from_date_iso, "id": mock.ANY, "is_paused": False, diff --git a/tests/models/test_backfill.py b/tests/models/test_backfill.py index 0f471fbd56546..06c41cadd8cc3 100644 --- a/tests/models/test_backfill.py +++ b/tests/models/test_backfill.py @@ -93,13 +93,14 @@ def test_create_backfill_simple(reverse, dag_maker, session): """ with dag_maker(schedule="@daily") as dag: PythonOperator(task_id="hi", python_callable=print) + expected_run_conf = {"param1": "valABC"} b = _create_backfill( dag_id=dag.dag_id, from_date=pendulum.parse("2021-01-01"), to_date=pendulum.parse("2021-01-05"), max_active_runs=2, reverse=reverse, - dag_run_conf={}, + dag_run_conf=expected_run_conf, ) query = ( select(DagRun) @@ -114,6 +115,7 @@ def test_create_backfill_simple(reverse, dag_maker, session): expected_dates = list(reversed(expected_dates)) assert dates == expected_dates assert all(x.state == DagRunState.QUEUED for x in dag_runs) + assert all(x.conf == expected_run_conf for x in dag_runs) def test_params_stored_correctly(dag_maker, session): From 2b15e9f26fee27b6c1fbc8167d0e0558198ffa7a Mon Sep 17 00:00:00 2001 From: Pierre Jeambrun Date: Thu, 10 Oct 2024 23:34:59 +0800 Subject: [PATCH 062/125] Fix main (#42903) --- tests/api_fastapi/views/public/test_dag_run.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/api_fastapi/views/public/test_dag_run.py b/tests/api_fastapi/views/public/test_dag_run.py index dab81907068e7..176ae07d3fadb 100644 --- a/tests/api_fastapi/views/public/test_dag_run.py +++ b/tests/api_fastapi/views/public/test_dag_run.py @@ -25,7 
+25,8 @@ from airflow.utils.session import provide_session from airflow.utils.state import DagRunState from airflow.utils.types import DagRunTriggeredByType, DagRunType -from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags + +from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags pytestmark = pytest.mark.db_test From c0ab523c59484fe7c98adadf796522d9ffef972b Mon Sep 17 00:00:00 2001 From: Daniel Standish <15932138+dstandish@users.noreply.github.com> Date: Thu, 10 Oct 2024 13:18:31 -0700 Subject: [PATCH 063/125] Fix main: js/types/api-generated.ts (#42906) --- airflow/www/static/js/types/api-generated.ts | 25 ++++++-------------- 1 file changed, 7 insertions(+), 18 deletions(-) diff --git a/airflow/www/static/js/types/api-generated.ts b/airflow/www/static/js/types/api-generated.ts index 6aeb8721e4280..15391c2942432 100644 --- a/airflow/www/static/js/types/api-generated.ts +++ b/airflow/www/static/js/types/api-generated.ts @@ -919,7 +919,7 @@ export interface components { /** @description To date of the backfill (exclusive). */ to_date?: string | null; /** @description Dag run conf to be forwarded to the dag runs. */ - dag_run_conf?: string | null; + dag_run_conf?: { [key: string]: unknown } | null; /** @description is_paused */ is_paused?: boolean | null; /** @description max_active_runs */ @@ -2728,22 +2728,6 @@ export interface operations { }; }; create_backfill: { - parameters: { - query: { - /** Create dag runs for this dag. */ - dag_id: string; - /** Create dag runs with logical dates from this date onward, including this date. */ - from_date: string; - /** Create dag runs for logical dates up to but not including this date. */ - to_date: string; - /** Maximum number of active DAG runs for the the backfill. */ - max_active_runs?: number; - /** If true, run the dag runs in descending order of logical date. */ - reverse?: boolean; - /** If true, run the dag runs in descending order of logical date. */ - config?: string; - }; - }; responses: { /** Success. 
*/ 200: { @@ -2755,6 +2739,11 @@ export interface operations { 401: components["responses"]["Unauthenticated"]; 403: components["responses"]["PermissionDenied"]; }; + requestBody: { + content: { + "application/json": components["schemas"]["Backfill"]; + }; + }; }; get_backfill: { parameters: { @@ -5531,7 +5520,7 @@ export type ListBackfillsVariables = CamelCasedPropertiesDeep< operations["list_backfills"]["parameters"]["query"] >; export type CreateBackfillVariables = CamelCasedPropertiesDeep< - operations["create_backfill"]["parameters"]["query"] + operations["create_backfill"]["requestBody"]["content"]["application/json"] >; export type GetBackfillVariables = CamelCasedPropertiesDeep< operations["get_backfill"]["parameters"]["path"] From f4270da2c76bb59dafed1b7f410871f7edf7f90a Mon Sep 17 00:00:00 2001 From: GPK Date: Thu, 10 Oct 2024 21:55:58 +0100 Subject: [PATCH 064/125] mark TestGKEStartKueueInsideClusterOperator tests with flaky decorator (#42916) --- .../tests/google/cloud/operators/test_kubernetes_engine.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/providers/tests/google/cloud/operators/test_kubernetes_engine.py b/providers/tests/google/cloud/operators/test_kubernetes_engine.py index d11a5af90a1cf..f0f42745c1c4d 100644 --- a/providers/tests/google/cloud/operators/test_kubernetes_engine.py +++ b/providers/tests/google/cloud/operators/test_kubernetes_engine.py @@ -545,6 +545,7 @@ def setup_test(self): self.gke_op._cluster_url = CLUSTER_URL self.gke_op._ssl_ca_cert = SSL_CA_CERT + @pytest.mark.flaky(reruns=5) @pytest.mark.db_test @mock.patch.dict(os.environ, {}) @mock.patch(TEMP_FILE) @@ -560,6 +561,7 @@ def test_execute(self, mock_pod_hook, mock_deployment, mock_hook, fetch_cluster_ fetch_cluster_info_mock.assert_called_once() + @pytest.mark.flaky(reruns=5) @mock.patch.dict(os.environ, {}) @mock.patch(TEMP_FILE) @mock.patch(f"{GKE_CLUSTER_AUTH_DETAILS_PATH}.fetch_cluster_info") @@ -578,6 +580,7 @@ def test_execute_autoscaled_cluster( assert "Kueue installed successfully!" in caplog.text + @pytest.mark.flaky(reruns=5) @mock.patch.dict(os.environ, {}) @mock.patch(TEMP_FILE) @mock.patch(f"{GKE_CLUSTER_AUTH_DETAILS_PATH}.fetch_cluster_info") From 70b8e50ddce305582802b6d08b1c948fa3ef086a Mon Sep 17 00:00:00 2001 From: Daniel Standish <15932138+dstandish@users.noreply.github.com> Date: Thu, 10 Oct 2024 14:39:33 -0700 Subject: [PATCH 065/125] Remove dag.run() method (#42761) This method uses Backfill internally. Before we can remove BackfillJobRunner, we need to remove DAG.run. But before we can remove DAG.run, we need to update some old tests that use it. So this is the first step towards removing BackflilJobRunner. There were some very old tests that came from airflow github issue 1225. These appeared to test the scheduler but really they tested the backfill job runner. Just to be cautious, I kept most of them rather than remove (which probably would have been fine since they essentially tested code that we'll be removing). As appropriate I either changed them to run on dag.test or scheduler. The ones dealing with ignore first depends on past will have to be added back when that functionality is implemented in new backfill. 
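As orientation for the migration described above, a minimal sketch of the replacement pattern using a stand-in DAG (the actual test migrations are in the diff below): a file that used to end with dag.run() now exercises the DAG via dag.test():

    import pendulum

    from airflow.decorators import dag, task


    @dag(schedule=None, start_date=pendulum.datetime(2024, 1, 1), catchup=False)
    def example_dag():
        @task
        def hello():
            # Trivial task so the DAG has something to execute.
            print("hi")

        hello()


    d = example_dag()

    if __name__ == "__main__":
        # Runs the DAG in-process, without going through BackfillJobRunner.
        d.test()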
--- airflow/executors/executor_loader.py | 2 + airflow/models/dag.py | 83 +------- .../contributors_quick_start_pycharm.rst | 27 +-- .../contributors_quick_start_vscode.rst | 3 +- .../images/pycharm_add_configuration.png | Bin 97805 -> 0 bytes .../images/pycharm_add_env_variable.png | Bin 86158 -> 0 bytes contributing-docs/testing/dag_testing.rst | 15 +- dev/tests_common/test_utils/system_tests.py | 7 +- .../test_utils/system_tests_class.py | 26 --- docs/apache-airflow/core-concepts/debug.rst | 13 +- .../example_dags/example_display_video.py | 4 +- .../cloud/operators/test_dataprep_system.py | 8 +- .../cloud/operators/test_datastore_system.py | 4 +- .../test_facebook_ads_to_gcs_system.py | 6 +- .../test_salesforce_to_gcs_system.py | 7 +- .../operators/test_display_video_system.py | 15 +- .../google/cloud/dataprep/example_dataprep.py | 1 + tests/cli/commands/test_dag_command.py | 2 +- tests/core/test_example_dags_system.py | 118 ++++++++++- tests/dags/test_future_start_date.py | 41 ++++ tests/dags/test_issue_1225.py | 149 ------------- tests/jobs/test_backfill_job.py | 30 --- tests/jobs/test_scheduler_job.py | 195 +++--------------- tests/models/test_dag.py | 19 -- tests/models/test_xcom_arg.py | 7 +- 25 files changed, 240 insertions(+), 542 deletions(-) delete mode 100644 contributing-docs/quick-start-ide/images/pycharm_add_configuration.png delete mode 100644 contributing-docs/quick-start-ide/images/pycharm_add_env_variable.png create mode 100644 tests/dags/test_future_start_date.py delete mode 100644 tests/dags/test_issue_1225.py diff --git a/airflow/executors/executor_loader.py b/airflow/executors/executor_loader.py index 1eeee1ff68a9f..ec79860918b80 100644 --- a/airflow/executors/executor_loader.py +++ b/airflow/executors/executor_loader.py @@ -173,6 +173,8 @@ def set_default_executor(cls, executor: BaseExecutor) -> None: This is used in rare cases such as dag.run which allows, as a user convenience, to provide the executor by cli/argument instead of Airflow configuration + + todo: given comments above, is this needed anymore since DAG.run is removed? """ exec_class_name = executor.__class__.__qualname__ exec_name = ExecutorName(f"{executor.__module__}.{exec_class_name}") diff --git a/airflow/models/dag.py b/airflow/models/dag.py index 2dc425daa0549..5cc5cf4431407 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -28,7 +28,6 @@ import sys import time import traceback -import warnings import weakref from collections import abc, defaultdict, deque from contextlib import ExitStack @@ -88,13 +87,11 @@ DuplicateTaskIdFound, FailStopDagInvalidTriggerRule, ParamValidationError, - RemovedInAirflow3Warning, TaskDeferred, TaskNotFound, UnknownExecutorException, ) from airflow.executors.executor_loader import ExecutorLoader -from airflow.jobs.job import run_job from airflow.models.abstractoperator import AbstractOperator, TaskStateChangeCallback from airflow.models.asset import ( AssetDagRunQueue, @@ -2296,84 +2293,8 @@ def _remove_task(self, task_id: str) -> None: self.task_count = len(self.task_dict) - def run( - self, - start_date=None, - end_date=None, - mark_success=False, - local=False, - donot_pickle=airflow_conf.getboolean("core", "donot_pickle"), - ignore_task_deps=False, - ignore_first_depends_on_past=True, - pool=None, - delay_on_limit_secs=1.0, - verbose=False, - conf=None, - rerun_failed_tasks=False, - run_backwards=False, - run_at_least_once=False, - continue_on_failures=False, - disable_retry=False, - ): - """ - Run the DAG. 
- - :param start_date: the start date of the range to run - :param end_date: the end date of the range to run - :param mark_success: True to mark jobs as succeeded without running them - :param local: True to run the tasks using the LocalExecutor - :param donot_pickle: True to avoid pickling DAG object and send to workers - :param ignore_task_deps: True to skip upstream tasks - :param ignore_first_depends_on_past: True to ignore depends_on_past - dependencies for the first set of tasks only - :param pool: Resource pool to use - :param delay_on_limit_secs: Time in seconds to wait before next attempt to run - dag run when max_active_runs limit has been reached - :param verbose: Make logging output more verbose - :param conf: user defined dictionary passed from CLI - :param rerun_failed_tasks: - :param run_backwards: - :param run_at_least_once: If true, always run the DAG at least once even - if no logical run exists within the time range. - """ - warnings.warn( - "`DAG.run()` is deprecated and will be removed in Airflow 3.0. Consider " - "using `DAG.test()` instead, or trigger your dag via API.", - RemovedInAirflow3Warning, - stacklevel=2, - ) - - from airflow.executors.executor_loader import ExecutorLoader - from airflow.jobs.backfill_job_runner import BackfillJobRunner - - if local: - from airflow.executors.local_executor import LocalExecutor - - ExecutorLoader.set_default_executor(LocalExecutor()) - - from airflow.jobs.job import Job - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=self, - start_date=start_date, - end_date=end_date, - mark_success=mark_success, - donot_pickle=donot_pickle, - ignore_task_deps=ignore_task_deps, - ignore_first_depends_on_past=ignore_first_depends_on_past, - pool=pool, - delay_on_limit_secs=delay_on_limit_secs, - verbose=verbose, - conf=conf, - rerun_failed_tasks=rerun_failed_tasks, - run_backwards=run_backwards, - run_at_least_once=run_at_least_once, - continue_on_failures=continue_on_failures, - disable_retry=disable_retry, - ) - run_job(job=job, execute_callable=job_runner._execute) + def run(self, *args, **kwargs): + """Leaving this here to be removed in other PR for simpler review.""" def cli(self): """Exposes a CLI specific to this DAG.""" diff --git a/contributing-docs/quick-start-ide/contributors_quick_start_pycharm.rst b/contributing-docs/quick-start-ide/contributors_quick_start_pycharm.rst index d830496b27206..4a3319ae97dd1 100644 --- a/contributing-docs/quick-start-ide/contributors_quick_start_pycharm.rst +++ b/contributing-docs/quick-start-ide/contributors_quick_start_pycharm.rst @@ -78,35 +78,14 @@ It requires "airflow-env" virtual environment configured locally. - Copy any example DAG present in the ``/airflow/example_dags`` directory to ``/files/dags/``. -- Add a ``__main__`` block at the end of your DAG file to make it runnable. It will run a ``back_fill`` job: +- Add a ``__main__`` block at the end of your DAG file to make it runnable: .. code-block:: python if __name__ == "__main__": - dag.clear() - dag.run() + dag.test() -- Add ``AIRFLOW__CORE__EXECUTOR=DebugExecutor`` to Environment variable of Run Configuration. - - - Click on Add configuration - - .. raw:: html - -
-      <img src="images/pycharm_add_configuration.png" alt="Add Configuration pycharm">
- - - Add Script Path and Environment Variable to new Python configuration - - .. raw:: html - -
-      <img src="images/pycharm_add_env_variable.png" alt="Add environment variable pycharm">
- -- Now Debug an example dag and view the entries in tables such as ``dag_run, xcom`` etc in MySQL Workbench. +- Run the file. Creating a branch ################# diff --git a/contributing-docs/quick-start-ide/contributors_quick_start_vscode.rst b/contributing-docs/quick-start-ide/contributors_quick_start_vscode.rst index 88ff1fdd84e52..61fdf501063db 100644 --- a/contributing-docs/quick-start-ide/contributors_quick_start_vscode.rst +++ b/contributing-docs/quick-start-ide/contributors_quick_start_vscode.rst @@ -72,8 +72,7 @@ Setting up debugging if __name__ == "__main__": - dag.clear() - dag.run() + dag.test() - Add ``"AIRFLOW__CORE__EXECUTOR": "DebugExecutor"`` to the ``"env"`` field of Debug configuration. diff --git a/contributing-docs/quick-start-ide/images/pycharm_add_configuration.png b/contributing-docs/quick-start-ide/images/pycharm_add_configuration.png deleted file mode 100644 index 525b73e6141abc16831d4228d60d504e16188696..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 97805 (base85-encoded image data for the deleted screenshot omitted)
zIx1rYvt~s-0tE)u80}Af-xDN6al{k+;5u)zo%(k=_+n-DBe9lvaBzxOrEOqylMhJO z6O}APn-T&5fOMB@abGHb^i3jKRv$%%Gh(q%L`58p&Pb#qm_ss{h#sC1!-U6=1Nv`Y zfXIwOn*oX}S`? z9f-?M9^?Q9OqMP; zdwMp_?l5=NKRwXtG(VA-H!FyL0hy(=8`Tp#U&W!fMN0BKs^VGvnv-)OVc{MPcU~;p z*urK@#Xcra0xyHZo*=B@guOgFAsqOt;=yF6pcSe^+-&(JrrN2~{`RuyYhDwKsOM4V zi|%qmb=zNuFXc3UQNE)uVQm8w7F1s)%Aq>>g%PMp1N|)ASzs%Q->w89`@I5asVMRV zBJ-F8ZPUw$85kLh0t$@HEV_e6dn+nJ)(~=fJ6KI*^gFIW_+>w zGLDt!tHKBnnB3SPyhEaFvU>aERNE9^Nn!Rij7eLASP{v9#{T9SrM05^M=*k=63VC&Kuz)$Er6HuVq=}UIQv@{zmASf->OvRrUVOBLf<`ZhbBRdi$fDSl5y$7 zg=o%OTL3C|cc(j(M0o-o4hOPOdW##o1w9mq-U*iD#<`z+LE?Z&QGNGX->gF2IRZY5 zbp1%<>$Qq8FX)sn*4(8ELKhYhrg=ZttX z7q-8gn*~!Uzv^zu9&7U@c4?j|rT1)*9IymNjoWmtfGw|0ae2h{SIpvI!t<~2^SfGn z$-+l2wG3oXNsX*2;cW>TJkw#T!@nRu`>9YWg8+&%DnHqC9-ApNr7dWUXCivk9YnS# z-Z~U_MugPSVuN%`<+z?`2xmm@85MW=W-6lmlYr1Q+Ki+oa#B6$u}S& zn2B$HopO&aAlR*s`>8q-E@74&q{397q--A-X_9Upwy}0;_DG^syN#57cYOh_GsH!7 zH~B^g`;x7)tlu=cH#cj^(w_9weP_P#Yd+mN6{0H+OK}^)SCHI5SYO-WqFn$?P=~eS zXIA5d0)RuZ@#Z_vDaVCHcl)mdN%1%-k>jQ2wiZajgHlnsjgqGMH5g*3JX!2+m9#BR9HK4hX8;ssP9PVGxTTEW4xf_2=qXW5{V=$Ta zY37oD!thn^zi^HFCv;|*4gn!T5kGZ0hiCK`h83o?qCEO7o4Oa{!GN{kAJEs|AAX~$ z6qiSVt7j$Y=*WISOh*dyi{^EC4!71zanXK;i(GRtds%s%{D$icEb#Cs3=PWOi{SC) z!U$E;$f&HcWWwiKXID~K>w8s&LPI>B4f7}E)I_Z^Yf}nBkX%9*h77~=yv@iFJxcYw zND~b+bJoj)c1jwnvAjbvQ(A@9lLFb+EkWN5uai<6R#frL^ zT4IXp;wfH|<4+h8c?b(s-5jdxwd2KFRvqy*qsx}`Q(#q=3%rH1@x2Na1clBS* z+y~F77SBocP{u6D!Bs14XMZ)dNv@M1V0l9U?Y1tTPP-L1N4rciBJ*+pr$!f zpRCZ2yLow0kWS``_xAEqFofs5;9Uae;HCwqY%Vc34?NH1XrzoHnQM5wHc*lfR2^nN zF{Uf{w(ch!0i-6}P2w7HchKnT&XbhQX@$L;aWTVk*K*P-k`Y6fgjG+Kpsgz0y1H~* zQQu?x$@iV=N3NSfh76Bq^KorX^lqGVno%X?fX&1~G= z9%Z3oWQ;b$O~%wgcEm{0c=0V)zPR+kiJN&LL&@3Qya>sUc4D}SlsO2eMM|agoF-c? zP^hzlZCQlv{?%EM(@nzGF_@CbYqf{#^TqF9-;f03>>O^wYGrmrTO&eKnvC{3`mp;F z*G1ibx&B<29;H#PB`!Ym_H<9p&ejzC80ph`l=gn)*JYCDL zCvH@{`Ihnv@lOvVB{|jcDr`!)c3CO0ZFyf^$9;^hzj??Ee3)KLd?{%DDSY8s?x8H% zq2y|rqVW<=%#U9KS*>nOe+Ayp8a85tM8~8)$dtC%F`k|>B6Y3P`Lvx9c{UYoz1@Em z@CE#W^#!h$pUP~Lq~1ySu5mL6{--8LlPqZB<*KXe%*As z-?6ZbRl&Z#W_^8qZ=mXvW>9+Kd%VHDeF=g4#%qT!Y-wIT4kQhdfX-oL)~LJ4A__bn2a71f2-;jx zQFXQGiduPcD+&WHe(;BuLt(Tz=i}v@Eyj=-SL{xQ2c~$#B;C9ykfS~QJ9jQRRnHiA zY1CTzsnE6hD(~)ePlZ-&ZHHqeTS1{=GQDi4ILbLIn;r7R&(AG6J*2isYmG{c9RpZB zm&@D&KrL_5NEg)VseP#rO1^)`lxzrITJg5UJM9q%8%QFd{IHW;FH(H5oa^{k(Q_?r9B_A?h723R8yV*7;H;%&I zs-c@QSV9N*nWc+DLJ8r%wa$SCFbI;fPcNUa(T>j*~>RMfglS}mO zqDDCw^r7Gd)w46ru>CGbm?7Iu?^#2+v#_@mMTP{0j2|;y(Arf=)toCAIS3-JV|%~} zGWd^zze=MMj8ngi;aPj%=kS`jz4A4tUYvrKwmaSE`nI~C!aMJG@V#twD4vzcXgn8~ zCchu1pFM|Pd@B!q709?)y$XDBiF^|=uftotIyL;Q3}P1nr4dpgfN3~45fLINvapiH z^)hw#l1bxXrsxljC^k#>k}-jer-f8Chf(+wd$^7}Z?@QHHj{OqmVpoux+A!FG-eOp zw_E%r&)+e`!g??qm#Zuu$g`lN^TYGtt^gMHZ7hMBS+M_?+pTbw$JIUAIp$bEB5~Z} z{haF^JC&41B!Ydl6$qmAuhtjIdJKX@D>U$xd+l@FOIqVONP^vqvP#3fC-_s0DfGl$ zHLRC7MnEi%cnhHN{V}C4eC-9#H}%SBkF$kzCGb0nX$H%aR(uOM}WN%3A1HC}!}V&{p*y(>ll!f1qZ z=c~ThhaW_mLE)jmG4)|pCANx@_2i<#=}bRN%cg@Ldim!)Zy_>wc zxW0-P49^$K?WMJO`2G+``}%KuD}tySb&OnEJYmMIC`F)_(834^(ux>RM7ZnW>;+@d z2!g$Ksxd=yv85@6;=2cGCH&)<<&8?|W&k;f_{sH`m0$0jhy-*|>0d>H*r{ZV&`9h# zadxm7isUR4>zSJ#>?*K_FfrYgy0-)qdE*mMvflF(RF&lOnIxGZ^P01wIhY5KhlR-@Zh zd^e9iUAp{`Dvux_dOnD|oGZZrSP>*TymY%o^PFeXEgOEudxR0G~57GD`XJ z5qO1*V-1NDMh3%s+!jQQjzrvd;I~mk3<1VY!i9dTxWa0X;Gl)jIZ8=tYT4|xfrlh( z>$s|d_9PM4gjxRf6*^=?LeFXM_4t?6baGJR4IwRDnlGPMYOi0fD91(}ER1A(diBer{Bl&ykp;Esww*Hg(61~Rk~#yOFj z{R_mVwS~m3VW}O#_B14BTYkQ|;TCm!wg>U(Le?l&vt zC6zyecC9Ao@c{vEcd%X#B3I)&E2wVC$cILvUgJ8^jzqAAsaie<*tj)B*3lHt#>S`# znWMs!wzxIHpfAg8zTT|B6Ba2pka&Wb)Og(<3HIFB{A=6?efA#S#5))X>X}7P^?N(2 ztG3f^SbCgI}&#>7bZSj};vWAocqk8HC8H##=&ra0>v8 
zp@lPLO8gAS0ig*yuL+Ssf~=D=hfYFnsRAdEVoE{#VYiV}CXR+@!n0@$AuS?(!jKd@ z=xc42ZX}ztPVfCA&vg*Ia^5>9k{H}yERoR1o@!juC7viMDIh_AF-t#2(>j>tvp=?i0*VA@SQ*+2|{kXoD5zX(h)rVrs`@{yLx0RM|=$+%~gTnkF8N*o&xs<%iw$v_bX%ZrvMK8_T*#Y3u!GmbalO14uU^w`5N#Zh97 zWlCtL%e^9f9&-4cghNu-t1C`agzzO{6HQQYDt>b2NhsCJj!CkHB3oe{mG1>hP0`H;A8 zE--B+J;2W#)A-~NT5W;8ISfBCb3Ka)dhA+QBc3!)&#(o-m|ip|Q{^PmqEu0qo0DCA zBj&F337GdkdAA`f`(ZanFFJy;t&T0f$Hbuak2#ur{Y6g3D|LlWD!$3OL8@m$>6`BJ zA1A!k6wMjRIq@TH1m1*ga}B9o!%8QV(bI9tI`Z@1;);b)0s-mv0z8Up(6ZjPvLx3A zafl9hxgW8dY><63;oVrwwA&Plkj4OOi#g+nO6F@_$&Zyh-@+NW7?4zx9366`MM-d< zms}9Qz7S?mU-^;G&Bw=nnur-cj>V~kO{!;YAZjsshxZs3vFDZW6{$+qBviS$0!a}K zxYXqT__f)kI6H5xnP$|FL`!gqLnr4Uq=ebDYLTTCP6mrN)e=iOWG9`X+%vceTCpg0 z#|;||R~JCo$K6+WpO}Jg6MZM0ygXDxTR4Kds;UI9aOt|pTI(=2Iw3hUiVwB<_|$_| zG8+?%g_x4`u%o6u7sJ>Sryo7cB(%85p)uEN&w-%uP9IQzT z4;slKQo3VYBT>KrMkfO`Kh2nTVe<4#?ulhnlE;$j2uK;h{#=m8tL&HcX)bp)k zf`=>*<8R@Ge)egC0MUCVXnw|?T)caBYT1i$Jdkb0Bq&@cJMGIPEg@gDVoY=Tc0w;f zMJ~aZ7RlLg>vjs;Wo{!E$`>4i`XQ}?SZp5Y6-rcV7 zXD3}~0W-}$J)3W8|EBNOWlx|fSLO}`XfR!qhPakR*|(|w^sQdew>ez%;zm+L&`-bo z{lAH6sC*8BBw%Ns=J8XgQ$R&x5x1hoOFvY#<4MUZ;zK|mdaj;ipSVsp2uBhm2ng`! zH@XK8re%rIOCaGvlgxPE{Ex!76&8%ydH+eMWq^D@z{OFK$+qtlkGbZ+vEL zA|#?SG%;V*)VKL%M#oA=%>HgQ!2|KH=)f(2NZV9f1XtUgJt|(X1Ot98UpDk*+C>tQ z6V#@b%k$`kPtGdFNKpvT_f5{=3T2F@8uBD|)MWVMO*wpv=t)LBHRdED;*`S8_vtE@ zz_2*rmrRDAv1e1wDo3kHF^g-vp6(cW}6yx3QyB;Lk@>!NSAz1L#sF&$0P zX|=UZ^YiGuYh;Og^o&zxi$#O9wlnExUx#Q9w%DhXNh-Nd1k+Ey_en}|w9#gknV((I zjOxv?>*1N%m^ovjS&w33R~(X%D@xQJ3*MyU;Las$O}TVXZF0MF ztUxxeB@9QJ4}5ge!?tJ(3?3jkLSE>mY`{vOl1cH4Ow8D6UrOEOY`Jix<1-jjwVkIf0;|YWz5k);- z0!GGWNGutLYq;gnFbs+@9!}o`epj~hlS#kzMKA+Y0aOx``dyBff3N_5yf>%RFZu5PM$-PVp!0jUq5L3K;mf#=B1OjpcM~DyrO1HW?C^IR} zApX4C{(E_SnKgG*i!s~ozN8}6z8CX}eL>{=kGHk3`@@OrH5y|P@lbZoNj$y{y!1!C z=A(Xk!B81pUA1&z84q$&-##MVy&Aez+DtP6iAs(Rud(Z*CuB*=5d{_Ab}&31$I>k& z+fWtk_}XMXg%)xlHkUETl!&za8T^0=&eFmpM!NM@61Q8a*&;_CAAbdl)WUYpg`aAP zWwCBh0Hu}(tDm=crIl~n{EAv$q9e5=X=H6b`zM>B;jkqxDzRQqwZ!hi&;k&$+DV({ z7>jG_mEkvKad|e^oQm0mhv-LuH_)x2S7uVgOTR|ilGr@#td5gtyOIw*769n^TE8KL zS82pzXoFEi$dAQM9YGTkAN-M1@=vOEhr`1WQL*oNvJ~1tnp%r!9KRj3icO=pTQ$R* z7R3%x$&t~Qh0=65FT2L4esgIF`Bu0Hj&T|c(%#k?h{ml1GD{8YDJL+Bq=m#*y|-eZ zrd{DAFvY)6c|r`4;XT zQOBt1dZHC^SxFY1V{L|aJzzw=SzE*?0=DpX2l#)qwXGV|_PCJ{IZ55A9QYO-%{wjs|!? zwZj2B(9H74z7WP7yxQ6GBt5w5eLfwRfbJ>(93J9QF%|Y{2xK2k+2AO6sPpC45OrVk zc{Q5RcWidb%g|Dppb!X-{?q(cBTwqdfPDVI7QS))I}m=u#^K`a^ziY+P-*m)T2jb9RI8RAoKv$J{XtipYBF~#>0;rp20ax&eyqrGUE zlc)B+XxsU6-geRXvhcBClLt%^kB#reysb}toE%J57R7uFMT`0BpeNdi=T+5kZS~;2 zZKr&^PONpZy*um_0M#l0`9BNp|2*C<#16Y5dfJS6wb9yhD+rzU`LN(@wQ2MABq&|? z74Ku+>gl~l=%axXjsT?s^S$pxdT}#FqSVW5wXtSTeVAM@baM4Q=HoTc2|BL?kM&{i zW1hpjmYZ%Jb78ujCG1J6ef*v~0mo#uwqW!=~7->$kFC|{x>|NH$xx^Y3AUqZJX zTJBGm4OuhTY&X2V(uxJc`N~tF@V%Q-ZJU(#?iQh<&CMo!jq7$S9a7z~Szkkw>FxZ@L=n{nP6f zAHCpAI@69?9Y^q-8po$Kg#YQ4A6LL?{@~L>36KLG zxn7Zif7tAKVyqKsqrU*U$+#%=|H~~1S1}<$Z#~{<#MzL=7K2uJ*#tu+$}7I3(w?uc zBBkmGX>>#*fxmzkB@!xzEvu*NorN*M{nGBZqx-m5!*@Pw6~t^N_@9n}xPmr`G%8jt z@?G^KH~kg_tlnf@58ab@-PjLFVk?imUyE7ler$AcUtS(Q;L$4F=REe=Y6h5H%@66_ zgqMB3?(%t>bL=I0YVH!b8Kx^%pQqp7us!|$wqu*KlJDlS+UwSGV9-F*y2dg0bY+{Z zYE<6dQS$LPgJ%H~bNc>^lEm%**&*;BCvYC7u&FnxhT@Ue&bW}<$LCq&mj~>e|wbth#C5rOY}Nn(*^!5YH8M8 zU)ejZc8K* zv?J6*Bx^gKYdbT*b9d2ktB%f~!i(4f1{FA@<9m{Ce#4|YhL#5!RmYh zY2r{DQGp>!3f-BY#&wzFcmOh zejQcCG(PVv{)*1`mY~~k(s^n}%RXG=f@G+!kcOM3Gsv8P$yUd{eZpP)P)9nd&{|Kf z5{xJq{-{(0z+fpLhxmK6*XA)zZF1j-&+DpSB00gN4c@PzrK5@z`J<8&iHh?6_r*! 
zBUO!VR+ISd4j-WZ1dWPlR6#R8;*I`c0F6s&z0EJ;TgGi;DvNXq02k$u}zdOrF^}L+65}!hF;dRrFw8G%^2V zcpdKP)0oWir|z2xL$;~Ri79TiABoK4a#KC3_gYTT^Sg?J##}jCnDZY&OX{?tcaiNE zYhooQoMKu5qnVm5KGy19yVfoT1FpIpr#g)9yf1Q0{fOCOGgz z?sC)xU6XD5gdVX*6Xv+0{iD=1hJ?iP9ydCgy zDz9>!64runBTo z5{(LbkHZ#cfaT1dLlFv}dK2;Dlqk2lZ6Gf+XkeEY5)YEck+Y$*0Xdg|&5sMz1IW|N z#0e~qii`}bT@2Ft1%#gvXXw}M9UecJz~##uc;FVe%n6=NnT-)kON4}Nms8z@NkmLc z*SXlA-2Us=l<={Y)ScQORVps!rh)<(Kum|7ghhl`OA*jT{~$(b0ImE zfc}Mfr&FnHatFRMH5o~y7Vj**)Mq$NDV3*qksx!*pOtwv1Ll+?OcK+mHBG9Ex1f?nOl+%i>tBn?=FFyy}wy zN+TR#vWr_yn_Tl^+c%QvyTul7Zz(6oTw%rn&r4rAQ)7GHpA#lrMu$9814ty%_Q?Rx zSjSipDDGbs=79bf2C)0w?F;i}hfN~=-R+-*<;tT2C?S%K1lB&omx6>O*F)jAOW z?F1L&iZn?DHN7Ab0jq+`e)%VtqB@Y#E4)8k?eRb4~g z++M(?Rb2nVro!@`G|BkHoSBXH=#2CBzVn6%TGHHI-RARgWe4;2_PW&asF&d*(s`ro zrbm~FTDC&1o4!xk;DXZTQGFflO>dGr3=12w=`$`dP=m`f0%XHiw$b>T8^6-@={S4H z4=(Y5mOJgc)ybfaI84gSZ--J*;@}w|Y~s=Hq3#zAxIb%Xrb8HUkT{Zjxqi^$UKXt_ zhdxR!j%#k8nZq!!=FsH&L8Y#aLMIl=zB-w6k>^M?HcJ$=(7&MLou`_nsh(UG-TB=t z_gt7MM@4L(GZXv!0UxAZt509O zk6(?8D2}m5IKK9oZg}l{#%4CxaXhJT(pEYtyyNP+j-$MU66@y*RXxH;xsG z`1_H{nv~Mag5)IP2j|{rX1<%Du_>fJu>Pj*0%=(p)(eDU)HJH5yTr%jw$wpp8ln?X zF#nAYILT9w7uH!-+??O3pcmH!r)0J}m#dsmt1%Q)j{i&m82##e(e6{9m*?vqAC!zw z{Qrj^FgDnl5e9^I6yJ;+YwvFtp)NY{BC4`jk82wM!+%3TKW9^RV8EY@ENi{|AeS62 zqDS2@1tF7&V^tqm?{AmBBLyM~ev5YAVJ9MrVT4CdcPr6U8qT<&-*Mwu2x8_4KAA6- zse+(b$lTBut_*wCZg8&P{_w}uydI=)b9M6Gt`YMufb zfIu0!`f)AxJq3LvKNx%xuWfgl&g=H7VasB>cJ1p$rwb$|dgOt+DVKe}ODuDnX{p6k z7?DYYW&oq7xx2yQ5#7h_pxIS^ZG3OVBZkLye_B=<$#;BSclCdO3|o(}=qFxJebMiq z;28(T!)AlI%rGL0cQKf3F7WJ#5&}%mz8n4uktMIDCYz=h{IRMST6Zm+ZQg$Txqs5( zQ-xvi>n}fcj!EAv+|ad2?(uG7%k{!PafqRpd1ZA`)qNg6sxFbr1QCN4UT6 z1gdW`*ghxRTOO7#6a4c4Q+IzxZD%x~HWSL~s$t=^5RBEY9IBuJlbfKFyk(|lp{8fT zSr&4VDA&bYQgiMVz}YR;+?OLw5Z+(`#Tmoi5`66(zgLhRA7A;gu&lrLy5AntxV0QE}HaqM)&q)|jZ6IKo2ciru zi`}5HnPoUZ#mZC4^LS}Fc!g*kUDk|gbkSBL4pTCZL;g;RbW$lC$t{_zw!SuK=StwG z${0iP$PtHw2Fk8F*Nh1^N=>j)mfRv6hFUJX>ow$*;kJP~-U8#V33D7Px2(Q$q<(_z zJ*X^J=RiYX9@k%{#U%Ih)U|M&dFC}L-XFLDloQU{I^rD(f1BG-&SnPX(PS74)N%_H zWKe4>9HF|$eZZ?Zr?W1fXqadV$IahsXFsmbf9ZqmPD(I?C4%5p*cHRBlaTGhcTw|R zFG!2*##XfWIu6x9V8&#T?Q#TlfJyD1zCSuL_z>{;BPAY;%a*Lx$$QtHryB^d8lfMQ z!YLZod6*zZAu=y5xSaI`FMwJUQUDi?K&+{0$CrevvSstc1U(65Azqs5d^)L1nE7mf z1fC=;@+c~H`aQ!id_#ADxP(ywRpcMA!Oefb25&}u&`oE|T2})gi2~<0w z345<_**HrsgoCD!w7!wU-9aZ@J zKk|oKh#aAwobg(@zM|3|x&sgF!N`f%R*J{QGtZ;<%z@DmW6!5#mMy&TX3fC1#OzCj z^f~#FMgBGa7&mOyCPxXCb2W^pG4Sy;`>S$<3=X2^8fW6w*YkCi_V^2PPTy^#x8*N* zKRU1ONML~fG4(r}J}G~2Td3XLpwZ2ml`fp=aL~!BkQ;&)6y?ClJclw zMJiU>aLUg^7B^BUt0c6^H-|E-Tu^0lIFBn`dUT|_YD6#sJ)3g~Ru0?0>97c)84wx79*e`Q*+(KU< zYm6q*l|m%oNJb|&oJ0Mype*w;aWI@N4QhXaV^RnI!epK9LCF)U23*V#t07xPG@gfF zSe@Zu)YP7u*{;_VuN05sa+Sv$Ic_KP+w29PAQuhU`9D8agK`ahRWl8x%)*g8h>G}Q z&HqSK8izr1&7fM)R=t@(0NL$~|7K6+XM22)W9~7gOy&uN#BVj3-1q_%Y!?heqxW&0 zV1%g^xr%{Y{!j_2u#)tbm*+$)y>@GOdv$fX1<{5(f&P(NA1ivsTFb#6C?!Uwy)n_P z>)!y&;+F23O#CK#MVD-RndApUZx4N+Y;%=|N4sum#x?#(q$mf+{XY?UU~>g`b&O)V zif+`!l0!=@p4iPx2le`-QR+t*19CaP)#TzF`5S;UCQO1#07>BW($$>qe1Qzlgm+;n z(|nOPKpGjaK6IZcI)V9$JJ@4Jvz;t*Qecn{?eG91 zB#TifUR_8MA--8{5?$FhyqLOcucb5{d5V9YH`_5Ub$q0QJ~YG7n_Z~8_ztYu=M=2SMg9gs{ zo?l!oT3fBD5+dDby%egiTnMKYq$4-EIFI}7TXq(1UcP{iK&Hv5`&yPzwNg4YB8yo; zK-GChEAgE5Bi0P;o`7|1#A;P$u7ZJi_tB(4d>+iqpSucoD*A;Hf{4j9RiVuDMZH)- ziJe!tYs=3I2bi#3tMN9a`%S93A>vhL#KXu-#`?2CV1uILQa#WyNCMC0dk2-D3ra`86EwJ zEsR@O2Xi-+TE_FG3cTw76Q;21CIv|dm%+n)ZyH)zZvIfloHi(8VJdVb;qi^wo~h z+wnODY3MuzfgXN*hfOFLZHsR`q>>)g$>h7zuBlfi&8N`_Dnn9S@Kse1_Lkusk^hIU zcYuz&>)OVr4VyHL8{4+oG`4Ltw%ypaoiw&>+qP{d|7m;Q&-=XJ`hVYVWzEdW=$tuw z?}O{Q_Ws2LESS_Ek#6yW#A1VR0~UeZfzx7SjU)&+GIRJ4?U{j9R%soj2eCFfwzFrz 
zcQZt2-2U@5vqQ!1jiI3nNFaTvlHZueR^SP)j>EX>Y&JKUMTm9Fs*vaVmgoCDbTjNd z=-v%SX8U{BrVqrp18aXN25Q@i36XVSpn%J6wXYqM)^76g^ zSL~xoeCSMWis#~QsA@BX0N~auEiM>u{a`cvXd)EM7Jn6b2Wv%0J*=Ay0YADr$(}h@ zB%Te=*ZEdA#w)6@H*%Eqe+<b z9!5|z{8rmG_wbVrb1k%9JKQ^)(<}LT4qV&nt3etKze(dP-1^FAY!#2XXnSc^ETfK!@dfBqW5={0k zilZXvXO}uT<@7pOs3E%7KU{wPj;N4Toz9BSA)K^YtyudDxXIN`<#p6Jt%$(gsbTYh z9Necw*j^~AIS3wnD9-(E9TY`-WX;JTFNWjoMVsuq{l@&~a5T z6!#@5~K ziNXDYQhMFd=5t}reoD&NjsF2EG24CmDaBVwP6Wh$dzAQ?(2V_5(v_*%;p#Kxm~Y|I zk$PC9)GSxjS0@IMdBBuz(A+M2vfYJiaHY(I22`H8HZuMe;Es8TFE9}gvHfjk%~si9 zWVL%YS5(6Zdr~Doi1GCsWT%l7EJ+qhVum0H&VG=#)|*^Z+hWa{IL%jDi9YvCGy`2(#rS zCikqeXAuWsh)(p?oQ0Z9yp4UIz6qXXQ@42Lm|C??hwF=M_o9avKa89_EJ6v{7VXwt z?ub~u#V2g8X}d%92(8RmbYime^jeCqdE|Q|B-Sh5w<#qV*rsOOjz#lEWGc?<>_tfeAkqVqbwJD+wV-kjGi;Er>)LoY*3E-oy6YMfiz2QkG{4;>@vn+;1%WHONI z1wfZ#qe~7kV+*LX;u-2op(@I5yv!eV29xLd7&N|#<}u|_E4LSB6FPG)@`x#n^Trk; zAyXTLTSQlFni)V|QcBH$Z4Yl0BE*F8JtLEQbhTB*76sH#^^Y-XaJy1Er2*)% z&w#cgZlcFsx4LM(7vj^pJ>6cur;WqMT4$za?XM#a(su<>2Og1bV#MZ}`=J;{BuOEn zL|H#5Bz17u8XKQAB^=8nn4PXyo{&0GrjJeC&%O1Lh)7TZXLoPxUz>|<30t+uSp0zM zXlEB2AMa6XBQ(z{HQtReFCO&?)!km~sJcw62K((>Q&0a11}m^mkT0;l!Oqn&N&(o% z4+Yb}x)#m>FpUKT1;ApB;;?{@87*>kDxCLCALi{R9@kvPFGsi^Huex&Y+mj?OX5qN zYz*_@_b`52{K}%8qoW$-*NN`Yd|+kMWO>*fiEdw)w})FsfvLF!HIS@-@N|-?06MFk z=R>Gr*l_SZ$JOVRC+`Q;yuQ@-yCjrx+ieZz7Y&@3MBV(@TIEiGFqm~W5zn}8&?F!Y(DbrY2 zTnJNo<_|A&_SKghk~%va~WI}{!_o#KlEL+s~qbQ&O*KQcB2YATg#9hg3<_I_m< z0VX?NMj@YDQZn?-ytegHEM7ud295>-=O4YvKVQ0Gp_5#OuB^vgTy_KNf0nw2@xNjG zxb2kP#dVk6+3Vr@x0_`i71t}nVWxWzJ)t8U&*z!1z17QK`-Zttip?jvnEBFh-uIjZ z>V}qGpCV{pv-&Ou&TvX)@l7W0-|U|;q+U*2k4@WeYp>)b2IiK@*B@{^YtiF-{x=V0 z!{XcCIrrn{W2baQ6OBG%+d^}Ja|ty6z@qERZL8bdeU-(q-8C>YtaQjI$EE5`VlXX! zQi6eEz<9s*D`;Q1d4JiZsY`pS?@jS=-avk8e0RIw_k1?tEen&PQEtz}PvV0tzCL%> zb{=n3@^G8jOE{zoVSc+_%XoVQF-8LZl1pz#lZimu$w?T>+oPi!Eis`yjzG-V0rqCWAwVw3$oe>%jK>WZ#Ly%s?|<_OBzQG))sXQzRcrs_*xfQI&L=D zs&gwz;(rTd4xlp(%RLIAhvTrVb9Oj;gW?emx4j<*1k2YScL=mTfrAA47Tznch0;2m z+}4lTM#WX`HmapRs(Nh^ORRBp>saWS{=-?i{(`3XD^|%<1lA9fuj{=ZNkZ--4*|$y zi|Y_pTP9^j*-zZL7Pry))jAzhK`Q41j~_mE7qu|Lw1+{BM&J++tIL(s&aUxmp!Iw_ zqX>$8i;*Q37xxRralcEB4)fqVnLeDEBDe2ts70NhjV*reFJi;-o4Q07BzS>C%k=CY zRzWH*GfE*5h8m>I{jA6Q8RgdlYvakJVnAuXGMXSH*_K>f{dvRRPR0K2)Mv%K*iv~v zIQ;arB^uWq@Ya+3zkM?NPoKhrEaTelT6rFy-(L$DN`1a$@Zzf}z`mC`-hUUB2C7-= z3BJ{}-iij24T#B))fZ^#G_+0|Qx67rL*gkZJx8+J?ozJFe7OWFxERn*Yh5y9E0-|B zxA4MugGRazeiW=955aA*VMG}+SXbAsVtyhk8eC*Zqsqq?P?llyUq~tVg{Gmah3%vj8(6>5N|c15qB! 
z*cD2ON|>%D>&U5@#9tfNUV|TGg-&aKU1Mm+YCR8lKeiN&sTF0vGLH$TiSvmE&aG7N zW*Vcf+~y7{!7ghGYM?&d%<>8T3u{pvli7jCb@Rl2F%tSptFjF8H`4 zI`p-}B?r$Y_+P~N)lC>J;ndB>;3d}=+WJ<5)jq#%8*2Xwx<)i3v3VzbHt*A>JYVLn z4+<>fGr==6j#HcSNq$uIV-f^(98jNG=y{&!)2X1PC1^4}WB+;4$L9M}mn$=6wNV8j zT436)xAOSF^O(C`Q<-fK(XWxlxa0Zw-iX#m$yAhe_uUHDLB#D=LEkLI=Jk@fGU$8$ zNPI%RZ)*Js1;r^%{$`rr%IH+SmMF_5p$Uo1?bIl0kGDQHn_PeYP0IQ19-!r3uP9gh z8;r&@dOp~N#fn>Bk}2!PcY(tJ4h*>9q~}|XTy0u6HA0GfgYIiGpP=sOUaGooJT?Ir0v=+>-Q{ zqU&x92Su22w$eEDPvIn&tuG9%*#ifM>+ROm4y$@Z;njx8E+rO7De^VCuSZ7pD6`*{ zH!=6WRffI^vYP~g(ZSw#jL*N8B3>7f=sP>>IzWk2K8e>V4Qrg4pOoh9B79adhG zub&vNJYN~6edB}tmQ(&dKX{=H*?IojWH~tRSSlG!>jwftvc7CPRwPMLrfwA7{G1$) zE!Pl4$yUTC6Qik#OoGc_GHRHt+&x=L@f)ahFJCekP^+->QlxJ1S35p#UkIpKUyH;O zg8Z~}Vo<#G3E|Jn^(L!cDv`qR@eC*}39KlkNS&9HmSS6mB=TZgw@)T*3wStnU+&^L zGjT8ETrK|UMo!F`J(&{SNSFrUn8tk>}f?SvUyUHBs zbQwryKx8d+ke0Zq{(NT*q&B^#mV3P&x$H6d9>)q}`Lg8h8OZ8V-Ow0LA z$NSkPn8rMc>8$~yCyF+;~!ObEnu>qs* zN&*y~XF}@D%e>F=`2ln~&cGvUf4Y^4gq%YmJp3IhE7UFhM*tM49d!F0>Gk4r|M*=?B&0ohV@#!QeN;9WA|HPgRNrGtWeDO=c&0r}&*vRru%J$ZIef#Od-8xru zH(-6utGRQR>y;|a!C+|XGnQXq0!}3~y8q;ej|OSBi_|fzHb*zk2ASKxl~;EE;k`YesCY$zsix*C5wE)%^JX(yCKR#7wJt zLw?LQh9c0(%~69lv|EJd6GlBdxaT3N#*OR*Br-zIdSv|5 z^rF$-D_`b-JK5jY87KZC+G94>atzK>Z}qslZ-5}yns#nXweIfv+nW%DMSCB*D-Q?z z+dFC;JdMjPhFkugYh1NTToPfqZ%>-6x;RvUL zhYA--{t`r9S;2XEl+!)y^=V#hS!zQ?GczV6L?Y_tiy~{|Ju)0C1&Q>f4AB@63`Vps`bH%qZ0V?`6-YMDBIm`6+fptL6e|$NY@(T7HSv?% z{{P6c%b5?5>fkW^I;uIN^5Up4m16%&$j7bByBiyOPB=}z0)K_NJFpsqUBuY|sMnpg zX3Z}n4oZz54aBCG6H`8>f<@2k;35PCu{h*LZOX! z!G^4Sh{mQP_{4t`8hUzdxP$5Fk?uqMz*kBX~_{|>Vi1cGw| zA@|sxwI;U$R(la6*LSY9ewFyl#>YpD^nD&6?HvNgO*&_CrQ@#};lEDcc(rR{EwMrs4qVx9A=9pR_Ls;pGMSeluP<%61oHx;hPaKD~6%wgGXu=6q5OKzd| z=7p1(_B}t~Lp!!xBld->bacF=LsR(_%cvF@sw*yYxfj&k!kmKAi+00s^Y&*{kNTL< zbcF=J+ws*@g8l`m^$$hpGCo_ENI-^RyKcwnLE@DREnQ5Y55piS|IvZpvHM-v;^w0M zD^v5*eWS-0e1zwnv7IWM);UsUxFY|} z%mvS~t=kuf^#^|z^oasmo`H4^DiDu`u8O@#E{R6IRZs<1qyBw0fS{dkyWO?k`_I2r zJ*|?&Sh%BTH>!_aznlTd{-;a!dLo9t(*LCd0U!AcaQxb1VBlV}mW$Ye4a*1V_mbs| z@AAJz;6{+05L%D3y}|wMDsi|(09TbUA6+zJa)n{F$HCc-?To(|>ycDNozZdG!`R%I z7d*vq_}v_qX_0x+N@X=U8T$uX2@v_tJ{!Ct?a;U181MYxx`q}w5DWB@42ot;y`M?x z`$_|i^CPs0FVN7OKW@B7d`MT4D))aN-@Y>CEFcTVggCi{+Z$I~RdA8L>0><8XA68t z0z=3Bz^UANw-Z6S;p;XW}azy`Zbh z*|Q!u*$owHev)|0BJsG!B`HjknlIPia?>8m6?N7n=+%qUfbIy?xhYZ$)5-J)tb@^p zt4Yli@8kar?1iXg2qY(0Ura$z5aUqP;2_Rzz$X4=oOoy!^|goJw*~CW_9iRr6b6j^ z1F!kg`}mqWgrKzsGzbsFa_yAE?OJ4HB&aYhT()BcOdT2BLL_Uii00PLuI*CDHskZ| zE)h-mv{}D{+2VqAC*49msyM{tH;rG8pW@PzG|Xvi+j$fXz@mTn0esv&JiTf-T+bUF z&(5&Z8gt-3u%gZh8oQD0egZ$LMw9rHvG&H9QGWZRqIpI{#D9DzCquSyrOn+{MrM*Q zW(-l^lKtwYZJ3FxPctP%m?>#G!AznOle*0=f7A%RkRiYP;j&332*FCz`My(W$fo}F zMMC}){!E+Ef*y~-f1=&qc`v!XnN$+c&xVn=~u&Yi$DGxxd)f~N)&@Njmo^}L>~ znV%s`oICQ=*M#inlOorYRFoY7>!v>4D_c}!42|7hZlwxmaUXhSlAc~9$MuF*9@(5V zD-u$_QodYYOzbxMlmrgNN!9O|?%xPY9he{*`$6+|)RwY;8ohMgV3aw>TA%3xj-+1| zk5q<3lshCEsFVCG(uSP9y#UN$%~YSbCva3~n$GnE{!25S4pE?l_-9wA%rdVT<#w(& z)6u3|Zb5m#CebH~kV@F^8=Dco7>!&|nfvCa zu0-G>hfe17l)}moMOYlL-ihVz{*|22{(>Jqb(Y2S1Dc~d9ILl>W?-C78o{xZkm5X4 zpH5_o!BY^WAV*^aXAX@nUs%g6qW?Z>xUgPLClXxvCaFYtGI+XyGCtINabXbXQ8ng& zEX!`cqXH5{t>rq;Q$Fm4mr_*qZPy^E-Xo=U9QRN>we^-H*{m9++(jA4?}=;CczKH$ zGySnQx)h&dXQ%G=JmMV`bqrq`#gT=bCKW&8O7;?osK@JWQR=adr<5D4`A14vq#F=pS_= zltSTaxb78Ncex&oyg0XjE)a4DW||f-)ypr>z~WO&zR%#ism8ygSXWEu1F4jMdwa}& zB{bMRg4j;IJ;!;w_>@v|09=;Q?T4O8#?rf(gwI*7+oYS988uU!xfMwdj&haI8(5qC z`UZNIFUAGej7u;@8zuJq36B@^3(1G|GNHA$p&>~cEEd&oMaG%<<~@UFr)8vVQ#kQ) z8O(_-L>37Ffi*>@MFFF!K@;S)M^Aa)iKt^33v0h}>b(rn!bvnkdSP&Z*d4hTI%ZWv zPh#gt(!5y5w!<}m5%9CQxhX$^9;pWmVnvwGjl=q9*ZtVuQE91B<)N%C!psaP0b8H+ 
z1t%;%kZ}Bk;2O8jy*y;hLQk9Q+K^`@!@f{d8f0N#r%;t5kxW9u=?6d zeGD||+`xX5N)S*GfRoGc&@z|&$wex1>THZAudfu>CIKYqkdhS!osDD_XDQZGkTxbb z_7A3eS2BT#9fUDGN?q@;mR-cYL3%~$;~IPb$1pn|KBF!GrC;?MmtDp{$H&{naN<8X zET1sZn{?!sPMl?$Vc$+0eF!{h)Z#8KPJ#ThpTNjgq0@am&eQIAz7~eTvXOHMBNvXwEG#VBj$ts`Slok(aZ;5{ zMe6thh%M88%a%}u%Czdfxp;pV_Cg6qqEuzslk0mzT9<8ctf#x#Ja>CmBfv(!?`ZOB zEmR_r8TI6JYMQ&TYG|^Lg+TlQ_%%T^tY|hR3$R52OJP1G=bEZzBtl1*@~j$1syw|E zYR4u39~h3YQR88E&ty4|qJ1feIFpRlb~mb6f9G10gu-+bm^KfyPYcd{5rzjiu^*+Z zO!@^OE{nu7^9v2q%&T)+i6>>0Y%T5u`%AK9`PeUc-TthANjM7C1OVE~2YckUiWT<9 zWq1d3=&N*Nk)&(#9&Il3n!@WsLDRf#QaDV>k4l!%DC7j|e4~f-W9yq=7Ct#Pq<$3r znx2kzzML@KO^+Fs7ooyRx&RI_yihn{G+d|wkwm7W`#if;RYV=AsZ4870dsZm0BLQz zTYwW>jWAnNdn~$$tjM7EdSfd3qu+wiTv$|AtHH{7oSK_NfTZr8NlA&B10TgART2z! z5I`@(DJA2sVP@x$i~19;>%OkmHG8jz>|>~pHVr1FUIE`r%8o7?G-*r?Vo7vbkJw5L z8dH$)RMwzKkyrwa$Bh>Sh0J%ACKC$NXMN*hXgTodUOwBYVRSb9v~0A8R#M%*liaD- z*lHG)h<`iIOJTt|8fBWH$o9Ofg$vS(zHGQBwep0Qye=r#@iazESC=%e-C5m3ywE` zt*`g73mtQ(CM5G!ycBA>0V{s^gr9Ur^=F7BaTu-&zut{9v=0#f9ZM6f^Jk9V9IRIj zmyn8Qn{4;TfF;+NOy^eKlnT5N2xcZa?k*xFL8Oy&t-uPI&_4A{4mC~}kgk<+FV+$h zBv;iivPf61OQP*gw^N#&HSI`wDrwD&BY?OKZQLinh1jPN+Uyo3PF`dB!bRb+t3cT^ zbKu=Pz~0^V!(pI&=ml=NWa5YImGImSy=kqSIRTks!lC|%Od8mbFdj0XJ_9DQs?Riz*&OnVp45v&3JgRwc47s@pnj+~=Swf0^!jN)j4^4ls)zyj zWuWD%w;?q3W?Ua0ON~F8Z>abaR4t$lN2|hO^C&Ei4>L|K4nsp%f6m@i%BcKN`YLQ~ z;A|bDgqa(XTBZN}o8(QcN=hZhr>z6$`|`Z-w8An{i|b&drGeZ=*qw1<)U|={xX~Xr zoVN~*PJql0!H*k6J+8HFLHr|9kWjS01Ed1Rv!x?@1EFDx;rq_%BN`P9$pH67!8>&y)~?GwNV z*+J_su2whoFnBqXY};%8^ibkl96wad{q$fqIygOa5X#C##92(z`V(^W7a$xLj?^QQ ze8EyOIg}v3)d~}wG@nv^?T}SQ!@ACgBc=KaIZYZVaB8Ijqo_b^>tUi`6Em}sv9Y$s z(+bs60RaI3+(tY)4kqi@ji~+>TingDR=0J@c}I_@Q(zqYY58>1Xbw>;_4+hCYyjAx zT7BhNClm3!8-%##=RF~k;=WewUO8`TWNvxTl6o(3oy3Ts2`VUO`v`E7YQHWfDcj+^ z-|=ZIlgXf;Ls=M0cz-xKJ_^I}Ft2;B4c45F%Z7zLDLrp8xefI3?s0JKsX9lY3Q!Vq zzNih`EXQ`ciO3M-c^P4tcF}y>t}eIJsNVj`>bV^=KK7~8ooV89Vqt+aDzuHEZ27cK znKCFSD5ILXzOD|4Z*5Hw+^}soy6E<|QwKuVnUBtIbC7m@`cvxE4#LFPI4B~bpLN9q z1_ow3*Tvt_+=1Ec;uz=sZQbyw=_&1H;_lbgvz8@=s%Oag3EiaKb?G-?Z9;#SvuDkX zXPUf$1{#uwpeeCkAP;Yb4RQZRPk1s1vEkok#>{7;~GZSbyB3a4&AVPkB$k%8L#H=RpnWsv@dyfw!nv z%b6@ut3Lxuwu52{8G^U6XAoC7B$GKvSoU5XJw0_Ps{Pj!n4aGHIh7nL28%~Uf(t<0 zhXA5!G5WYdqIZr+++R7Ikm=VorVqc2Yg{UOWTOIF#vth`^thZ>d<;p@H}sVp2U?+T zoWKZxt!0!6kGBnos1uXl`~-FkeU0Eliw{Zi`OoCdr-u&A`cOK(05GpTFT%lSSPtfdrOx8HCG_`Ui0Z8;^+m9 zy>VcA=?v|;6U{Us)cu0Yk0L>RcaiAjY10S3}rBfoW52{ds&@2qjNOGMj>K8sO=Orwj1aXb0=Fp@|zAp7el!E z5xRimGeu}5E(Gg2a~R9tGy3orUyIQt@KFj41}pkiYu$s<;k>ie@D9g8y0huP715`? 
z&DnJQ%;y=lt$kTQ|4O@+o0~>nGVDzo1#N$$D3#>>J9_*5w0rOK{i3sTARZS>=QrN) zc_;ioE&F?8k)6c>(IlF#S@RY%^IRaa>i+s=d_!_<%9&^7lHY5+554|i%**k^l>1$r z`7;vF>twtD2nMc}=RR*|0q%{Tuj3bfV8?%#G4P!*|LORph3jLpEv|JHar3U|rC-J4 zqV=cXEcevj%^@16qFCi+iUht#X>Wg>kmU=3Lb1IgPhb3$;CnTrX2Kt>-H1F+Qw39GAe#-Z)Zz=0@ zPG#<_$W>>@Jntgt`Kf=hzW-?)9G_!j7P?`;YM!Jhy>`>1s@>64KP)BIty1idyZMOm zFArp~*&A#9Z*@0Mukix@q@f&tjqgwI4$fHqd(RRi^r9JC^D;b)Q2BC~4~K>y^BEQV zzni^3tL>qZ%IvB+bLQjedbTmJKCDQ@8H#53UoQeC@dSEdtjp>;by>I5Uvv5A=_VP1 z`w6T2cutxSn7Qs`<5(!UC85=qaXcQc zZ^~OP&T!7cI=Pts>HOa@`h+pU8q)^rag|ozv%E(A_*ldW24G!+eY?LCYJZOLX0tT= zoJT7?zh%xP!DXlJ{mr|Q7cy%QPkb@p9LL<|=!(T62}4qkmsXnfE2I!2K~PT6@6Wfp z|LvgE*z8~K+h~6if1F3I-k%1W>f1aXqD{9&7x%e7yLE@$BXRFO9ac(VOX1Y!rc@Zd zUA?~F7evt1-iN^0dfZKZxs8y0mwQQwe_e;{x8vliPrElcoV>#J=ovql7y+BkIm>VV z;Jp0h{TgKZM2KYvRL-~g!a+vhB%u!3qpTgf`Iy^dGbeuLHlkD%y|N%?d0}nxe9wTs z5h-ED8pJ8b%VyT?p)39F1_E0xiuTN2EZyIo=B!Mr;XcJwu07FH3WibI@gkQfKjgf{ zrKX!>>74%)Mcbve;sj`uGvbn7+sm4105Ad1fk1@Cn8G7O$O60Qcq%Z)GODh|sw1W) zt0^ZUb3Y`-0I45OYKCDiO#M0b)i_JsJbYGJJI&$kcX!SX7l+X^wd*Z38ZPx=d*)1c zhn_-dC}nHGh(hD@c2y3r9{|=@NBwh#1Aa@iS&x5#4WTB@P&6y8w&;0NO_ygYL z#_>yPRhf~00O*$S*tRJ|t45-bKmam(nw)xjn%EvAfr zZXPyUiXB<8L!Moil~J_0SZ5x*bDj19rZViN|B)@8$izX`!_#FaT*4jcAa`|;>TE*; zsH~6EG>bE0+G@6DwA}y5wRh-Al7=#tpM!(>D|@@z&;T@q3=rwxJMht{+jkG-kLx72 zbVjI6En;8?D&NoC75sX4X`oerHgmrO*8RBRJ@D74K7aTKNa=QNESfuXC?M)v-N55F zes`XE(0~gOdZ#>Pzdes|-2x8fvDE0jd(kcj?RMyd;bU(iT;@(wqOVi0Mhe!3D|#|z zmP(s?aBX9jS`_Q~wZ~(5f}S{k^e;IL9+MhMWuopRs*}EtySvJWQMpDwU}GpW!&dy5 z<#L`f)fQghN0R9^^3w7y4nWpI!|{^TPeN)g77_)w?$uOGM=@hC#MCxQU@{x9kT?ib z^&MfY!9klCV$17zur^bX1g%G-%?UJIM4ts zrF+pKHG(`IO-Kou){^fW2Q3$Nb2Dm$BBp05(bOr@7{shNe?z)Hj60aji2G4mJ4gHD z^m&={mz;d!WbcD?jOZk{i+ zG(NrrZB^xKQqZvE?$FR6Ww_jC=4=eZj!r#=ciz{h@^}=L6!we8MyZI!2PmB_SdOjN zF=D%K05#V2Z1Xe)Ya zWpiK(h zO>DErxtQzIb$bh$63mCs^%Esuv?W!SX;3oAEL(2NHL*^PfF!{*Hy3*@}Ityncmzy1*&>{%@aC_Jg zqEL-#(>Ad36n@|}ZN=2X4<-$BtJ0~cF2VA)PnHqneHcN|#OZ^P#|>F{u%(ZzgyZgV z@|AiY%s`2;AUv;0JjDj+TAbU<;*lrCZJ)F$lFN6}vv*7)CBm|pF8wWudNIrjG7!!3 zvboWe!K9#ApUjKJ!9*b9%F%{%x>y~s-2lwU1WS}ws%R=B6D63lSYQ(?dd`#E=^Qu#ICxu?v@JFxhb^GRPZMgJ5Y3ezLt$gEWGRVTxquc3I zm|4=f9L{dibDo6eC8K}`Y|qrEF84+W_|{9Id}%WVB{UQiZxj5M?xgD*94)199nClj zi;O{ww%3gKrmnq{Ih|iM5ehLzAlQ;4TI!2u@+E*-b20dyjwEr;Zk>AlH=p!7nHxBy zTHCKvW#Chicvf*MYn8A#sB7V^W3_^+>8a~!eIaoBk{J|26$`K@9bt!)15t}YOcC?P zv{Hng$is1{*eZAkU$Ru#$tyHLX)#BI))qCU%0)*;B$T`w^Ri!d&baUMOZ}qS?uBUN znvT=Hm&)tA{P2ihnUzyh)Og7XTB{;juLlbb03MTg9mOc=PVbkEY=$X~rIxU^kdT@! 
zdjrqD0OOU!_#$`3Uv6wdg9Zu?yng`*$dE`oS)JGjgb17F7z*N|kU6L*v3~(i%u0PPLgG*misrbVuzs`m1%i|wIw+v&9_5pX|5*DZwSJfACqc6 zUT->FRSAX%mX)g@pXF=+0#Pqa8jAkP?PBe(xmExdQ~Y(=s!xS7E;z8YwQv}Xr-JNb zY4|8AH6!j1|LX}BoqgDJJ=XNO!$U1NxDuCGgXz$Yv!Rr?>P^(7(=OPPp#+ z)>=PC|J~I^>*c-}1!6C1oqCKfhl~-&VyZ^y&(g3>$^>|H%kF|C69okYAYAG>1Pf|g zZ+t47;n-r(wJ?vnZn8fP{&TsB7-}^w-e2>kQ)eG83|Ik-j*srHS|+;5@~EB1zVoc5 z{~|Ka;C>I_;cQXhsC^(od&?jUy1jxZKQ-05c!-IRQg96$s{w^q{{Uym`IMkl#w^V% zvVBcBvG*Qk^1dxL*E+*0!`>AnZB&25ZWvR5A=5LsskMUT$9*>PM3G>3V(-gC78gRajTs>Qa)^ znvj7Y269-==};v>XS)sD05ztT2wxqIUF|6joB0HPyl_b8(OfN0dfWIe=JwpYm#gV< zKHHs{!CcWxf7tAny^=e+d^7P;YfQ&vAb{o+>^63jD$#FAo1DkGQYg9UDa$nKhpx?u>#2&C6LJfcJf1IT2#8NQZLn5TuS zu!Dh4gaBl1()Pb3B;QoCn3IRbegy1DDzrZ@RR#x#H%49J1E#c`cbuT(s!WDk(c1W( z9SCoa&W@glEgXJ@f5d(LeEcDMM7eElexM-{*Y~s>v zQBP9!4=Fnz6fa)*u4`O`Nu-O`FsdoI*4fX1j(f)XrP{+Fbn>5@UsGqhqU@Lw;_0Gm zT$y3=uBv6XC^&fcz#>5re(@5YK3s3smBswXmTYM<*tj2Oly@%R5`>7g7$h-8l`z zxq_Qx;$aEsyj6>J7#ZhEc|I>?%6tIe zp{-94(HD6v5UWKyO}0ouw@3|`GfjK}n4~69ITDdcd6OJQafmePM}v|0FPOUczT&mW z5;Qq*Q`bW6FjYQ#)0_GEH2CmMrzI;1f#l!IY0LyVgM(&ni4fAv9{hC*s5|_hym!ou z$p;I&38j7TefN`-hXxhAs@x4AT?i2MuF-0Eps5ewp@( zVje5mkEWo8ClDV)>=#KB`h6tAgaC$D{P58vY-T5o82+qC=~!j_CJ)WY%^W50a4Ax0 zAEvRE$}AO377ZB<5t$B*l$7W12naBCfNmUC<=_Un5W!;$OI zuN<-lZb)!ZD@EwB-xC6Rm+zXQAlEC2r>1yQR4hFw8xv;7@`@W|kM-d6RK74VYqIzC zCb5L;pCbrH-p^-Ohy|t2JZU&Kr{ze%G88W9>`I4=%1$BoG}xl-Jnt{pSy@?L1t}kJ z#mliC0=}_ig(8oK$QkN`#US|a;@$OOj5}gI)}-pc>mHAIsajcYeUG4g+RO=nRlK@R zC>`)Gh(NY|Pbw+mbQo%Kx^AlC>JJ%>=%)}1PiO33smDh=j}4pXk(sKvS%K61S|S_x z0npX0Fmni9s^n^B9pw0J2<_z7pq9`;q6h)NiM5NYK65hiNu%cYN0LXj_yh#ZyKr*L zO_VU?)C8ad!!=BE&F!wVJsk_aE|RsB23UscE-8p|wcIRZVdO`7sjpLHO)m>Aj!IQ>`!H{^1HH^kboRabweNy-$ zS(yR8*BAm@+fW4}Z|)0A(1P-uh-m-|G+bb=b|&aHFXiXs>=~V`ZYkSD@h^~*CW39v z&vz`dCd6$y@!FYA=|6=D@zwPh2q_gfiU2}&)SOyZKAJ%5Za5w;$&lC~rT>XO44gevKzneeh728~D`a-Tks(vlGe5V&Fp21^GOr-|wrmbEL zX?Su^8~{%_k9^t{pW417@?p6=HbyVPa>=4DXo9x{>9p1;o@?b2$^N?aQNv|pn-e^& zQlc40l{pp@QE2lEW~s)MDy;QD*5)+Tcb3`Vl|iM$P=PJIcKwjf1cbJ6%Hokse0+3H z;Yy}qU}MT_D8)Tr>q>y@`4@YPx+f^boY$inaPoQZaNLCuz{f#!m2%JWvSD@Fk~FBv z!`yjdE%0-X0bAfwa{HzmWB1D*CImJw>s-&YjwerE=X)jLtGtk8RTJE zq_@{x`-y!Ql2$gG_uR|QO@25W?v6WC;wfsB?cdvbcF56TMac^i5fh=8jtTwP|NQP} zcoU}$G#%CdZ8||*6T>OWc}y>E<@~JPFIKX;`WnkEFXh8}+*T{n?rG|2ZM#o`Y)qjwrd~2s z|MufOsmT{)HaV_6DZl^`aupL4@V%Y^iJ$j9RI7=_3e3n85De|Vms%rCN^h)7ke_>6k>KZ8T2NoTfQq23h z;Em{QQNqRMUQry`T5E89tz@Q`C;PY|ks4IMn7nC6=k(-)s+kOTG^_+y9AzuhX)ff)_!RcGTTFa!jMmZ3DsAWpC41X zYL(bmpJGE3vxZ6aUECqT^sTU;Wkcv(ZuOPyS%z;ARYI8qlP{8pfD<8@s1rP5a?FSD zUREKju@_czR738SSmTI4XI?K8xPvOS^fz7Wp3V0~jy?TNZ3OT4 zU5Am6AqDG;1`V9!^>*s!hYdr}>&$SRvnXO%l_?&VH}i8;WU2MqDjwb^eQm?ij1n?E zyEgEd1SnJ~f^n_Sa~jA^zk; z1HK=uR`>!+^J{!W(rZ0m(reYi+r{aJa<&x1aMwTuf;+NR63c z2yKl@zme>@iiW3|o@|PXE1e#K7%r{wng)noLXIOw%1u`391_5XO5O9=moCa~b-L%^h+tj{ zcoO+}lQQCC8KIefBJ1-#DiNIm%>tnk_%FFhQ_AL*jo3co-p?)eQzoU!cQu-v3e(G$ zO!qSxTvkPcpJY!b+Qz(p$1QD4H)PLw{^;#;GH~#XKisEVm{oi+Txs8ZXSm;WyYynZ zjDgkE|FD%rf}_BhSG=oLX`|Q9f)V>A=V`k$kOP-@@v^mNd9TPOc*7Ws!S=i1goZ1z zXkK2=$5F71?bovnqM5346kMPjKtso+bQDTRNZf}aqicXhk~e2coDxo=XxpA33mTSUh3(GhHxE27`!r_K$wu~o z&?N+E*A(jfB=y$PU>tU%5NO(`wbhR*(fMeFn9ud+boz<}TpEe{k|*Vh+v6}aIP57{~p%c2n4q%>_~%LP zef|5ps41$Tr+ZGHv(KKr)?RBk679pUf}HjteC;ImE}^D>sxgv;H71d)WM7Ktv>IED zF*9ScQCD74N~lxuqhZ-(2t9lm+iY|!FRt=q37OMB%G+41XOl?_209nw#2|zQ$EY-< zE!It>AX8O2vN|jYD}*Fz3{xwaCfk~Q{q_IgFZt1ZUks3R6rlqH+gSLyO?5R_d~-ng zdsiM?ok0TT59y{n;t%O&Sy8bNvzv^eNIYbDxrgiFHcxagP3k26SVi=4^|LX8)_`mv zIYrSo?e@p1&*h~;@PkQ7~d zLmR20$s<^MT;686>#LJ5L@*J_9DLukH>hS1Evt#p#Jl*R`@6aroxTbkU}9j7sPQK| zjGgDkDUs*2uI&xTH1_PF$yHz8o<4Ru5n{l^S*yYqSA>R-N1#C<2|TXXJIT(bHTv*F 
zBQ#u=VuX+hYo^d)>K31vjEGHAQ8qNy zQ-H;dMl}Ijggacf+sUTVm(E{QX!{`<=a-4{4K<+Pi@`qhp1 z(b_j3Y<;lbI~N=52L*kWxDB-ZFvb8Yv@)3#4nc*D#q}A53vzuBZf@!(;k)&15o-;k z8%iZRiYvk1k_}6qVnYt9oM1Ols^IZlyw%NCG0ma(S?WX!;3hLa_yDn?kGH6!zic%j zdO1qSvhASxANDw0my=ew2xa+k5rLsIS}sZKPz+F;kE1sn?bPg-X?(4CKHRBcp^&A{ zVcMRJv%%dh3Y`bxh9EJCXL)5YHG%oG)?URiRlm=f-=A?OGTEoLoK2-==Y|Ntkoss- zQ}CJ8@J&D^@GJ5i7Dq?$HYk3%#or-k%5WyzeEOS-aq1dy4lWW znO3dmTjn4bG>PCNB2~3 z5`=;*tg4bH!REm~b!wr-kkr@TJ}4b2D=P!3e>jV32G==lq!KQiBzo@ooJO_Or=8Rs z`qx<;NCrOJ|AQlwbkZ~woIK)M>msOMANp}L6h>!8PPKALd+-tg!1w+eQ)xN?l+9#a z)z#Ma4h$IJS=9vu1lTI@kiI#{4%IRYX~2O*PCeU^%ZqZ?4A7Wf>xHbRWN>lUgv$GT|eI|U!`WN0h% zzws>u3_!#P_hRSQv%SRk=8|rBQm^Xn#{uIbYOm1lM z-sO|on=eK$@n8Q30D#}%ki$s8T3e9UG_>=T!pFe1lpE|;Pyhqt&jYQdmjBKH(Pd@^ z1GDCSh`d;1;^F0V)mTq+z1iQDmW+tV^tyP^e$RN#rJ*PwR0Z!N%J^K$D?{K@pq@4=k)&1MIcC(@c` zG-wJ^vdd6!xkRS@;oyBO$7{3A;|PD*A!6qq)PH2P#+j$!3L)FTk(O9umTu*&p}#t2 zokn@a7m3pWtIrT_|M%@7L1wt}X#F4lTx+eY7pyBZ<84!UFA|iJ*C$F7FkaeAl**_- z*Qx3Phq5=h>W)x?0XvP^+1dZNv#3pDgB2gAfPTu~pI$)f(B)#6%(}}(x;$Igr0RvOt=IQXb&=)j#D)J*db9;Z)CM4umUSG7iYSUefrEIZJ zE&o&TMF0cQBUMW7RNB$>9(d%VaLQxEe-g_tJ-j`>x>10EsfE5*x?bJp(#LCkRSw7> zU3pIBk#66JYWFUW9>I8PiXY(kXC1GpLn+q3X@_9iKqAh=mQ6w=!@S?MO`=+MT9b$E|PK z?>XSJlGc7Nn;YsO{VtWb?s-;7wc}GkBiqU0ojjtt+$`;+gMyd9?D^Kc zn)ExE8L-GXER#uXe)_Z2N+T;&dP0|;PMU3N&Yw;B-1UK29>(!E^a4Crk|tOn!U$g6GNw5B@U!!eU%a>$2qixu@a$ea0hA18VJ)PJUI^06tnG zYDM$Qz0^SqZp!Hv6G}W9*pao_A-s$h7H5@Jlo#nvHnSV;`4+0n=qTJl7BlDAnao%i zk6P={pet3lj6E(n!-k^HJQV^W;&F`z%||Qa{-=9~nGp~mDn0cH_EzdYSv|U)VKrjd z*}u72Q(^fTn7P}~_tIOvoMcul-*@JW*)hiNW4ZX)7bqgdCq^d{n%vAriX&fFJ|bF= z&pH}N!uBnE1@T)QV-LezBY)qTbcp^R(=ADF|UCq;?p704>F1~&ZpqM zf5L*?@AK!#0+kX+5F7$3C7aKeLIc(H+|D}r;F_U5NjF_>kHboTbTgdZ#ZQH#S$lY9 zHJ)8&nIVqT0IYcQO^Z${M{;Ijn4R_XFgIoXmounI3T0$G^)C8;J9wcO6?u3 zB#C^XAA97F7erml_?{wmDpz>=RbsJJX=(7v`N@DQ3}B zXO`^omAYHMB%3`W=f_S-b9~(=Q$*izw9_DZf zoh$3&V+?`fPTve>aY~Ccf=)JbuPn7%QYIE78m0Tw3mw7r=mvLS zTn3f-DXTl#a^q2G!bGsD0-JHAhqywS%(AVQwXLaQWc)`_8Ctda$qr0`aJXCPn)=*% ztHBW&n0*A-G8HZ#pDetH+`ZMM;dx%%3MlC@-Fh+#i$w3US{Xe+ z*nsI94rlUz5^A`#oN5ByrQ#UX#A69k!#nw@Dsrky^K~<-DLp~;g;-uXKr?oyT3AQ- zN2KE`j`R=!Hl%ISm0Rhu?^IEK7dSJ-9OunnhB35ri}=3=IO+L$st ztE-*UvLot4gvP(;)~a@zZTl9ehMq>(fwD3H#;>UwX`^3w)OUvK?P)lU`1UdufAp#9 zX-3!cD0!V>nzXHr6~u(v4s-!nu|!r?O^F2>0v@c}v z>}m2qWv#!PyRYw`!ID5?BI(3)8+&1WSXZ;p51BF^6~`TX67R|Ri~`WfcUa&G)F>Lf z9R<=fl@!#MHa`lYwi!x#{rVEdJpe)!mO%4tgn^tSGaGkxOX=_n)CdS?)Z+6fY6t|i z95U)qAkr)Nie6Th1_xy|H?ry-1M6IKE4Ck}Vjx}wy9rZRqQvZ-Gu*rnIC4l>nY^37 z>zh|_I%%1aZWm^u7+Y`TX_ zNxThPEU&uUFE-7%M;sGsbA5@5fI>`2WU>>=Hs`U|z~(T#2hO%rV@gQ!E|tKoaSrf9 z&JTj^3@IhlUQUHF-CpzhCu!9!1~85jBy0tJESA*?md^}%=let2}|PH zMs#`8QYJYR z?G0@+R3TxZ3F||!P=E7hR9j5rv^A@SKX#C7LNjAvoi`qm19SYSnt$r2Aj;KI1-b-S z=UgUHj>yln&nM|^mKPnK)9CfKUd>n*>QvJ{IM-oJ@$}fDS(Arhs=%n)D4lk7d7b!uv2LCgN&tcc)DuZ8$}nE{$~aIl~vU~eCR>G5&kc`}aF z2Ywle$(9O&4{Mv>#&bKa7l&B;M#D8R`W_|dYDk7kxJFVnX=~uyt(w&OyJIH-Pv$r9 zASy+K(#o<9&-~Q-h-`44_3#gdv{V=I(QbJAo?b98MPFLFGt<+1kAD*LEa2pzL4pI(Q$@7=4T@3@ZLl^WoE?m5r{s#eJ8d_njm-T6v zBVebm$}1K15`=N)4L$pq??dYP8i&)!20ra2g7}f-Ay}5bhlsjJT{!xMK=u%d0n7X| zBrud!CPcbF%|Rv|@IxsKhLw6sl_IoR%`DG8NgNu-D{alIwQ|d6PA@u2H1Tpo7@<@h z{6X1_Oz?gjloDTKKkP+`Sxx`A!s|h{wHUvx>|*;FEI^z^S@m-h-5(blNz_2x(fcxP zPuvbH8%jaWs-zDZMC#cz#72$oHAILc2eBmrB4z%Qvf)Sdm$FgF=V2|F6C9z<%Vc+n z9LH(@)Nhk2CA-mXg#z2t48Gi#Yy&qi6JXviSP9w<(b8f!>zBCsuL#%>?KZyeAuHOG z1`HhS!#z9}{{Hbrn1+?>!ja8r@-k72$b8^8DGcFs z&nhb$A&5qD!1M^LUr3ybiDM~}0K0UEQ?-$=VY1#lHsTstCRQtHIv22@n^`$P_YXV@ zWTIGZ)#YN-aR0%f2%1<%5Akx8JI6~~u2V#wIvPqFojM@9gF(L(=)N`9v zD?I$cdHggOu!!H8!Kx}t&4tLi%=O2F&?5qe0bv_CO`07euXJNZUV8+FN{q7wl+oPy^(Xryq 
znlh_2X0=wBsGrxG2Emg>iocpcaW{`_o^0|GYw&_uJS zo5xg39WN%a>;`ur+WLoghF}{J&;oS8*(_GiP%v|}>;8udmO8nn6`ZbjsuUzc-DZ!8 z^tVbuZnkOfYHe+8YYQ^BT5q&90Gat}Yp=Oqw5e)or6nd};@~{{N5+uZt}zjoy6-nn zJb{5_H-3I|IJ{~!Z(mmk5Pjez=zH>?EaOj0@WMoo`GNJ-1f!_u_IiHY3)Y8=dq=D3 zWa2Xp59o59mkScQ&QMthZ9uJ|(?9|$xJ-VG8P(f-+%JA7Kh(I;vuDIB#v~5wKB&AS zN3dKuBssazDP%BJ3*l4{4pP9jf1F^%lRbH@uX{LyNY96L0DO$xq^=3Om!v~LlSD{> z;l!Fp=;BNtr85#4udrAis$!K$Y;$w%4>BL2Dl)4ajAmmfO`MNdOxMkoJE3r!dRz4C ztAQJtk@wmQ|A(Uxt%GSp3gP;iQ9X^oHle%x)SZ__N=d`Yh=xHDF@tD4^vQm*eTvM8 z^d!sCOa5{$^CVDXtVU%1hK|}mUtYi-ElFmQv%nCL#;Bv)b?hSz~ zb1aVcIN13WWv4F6rpj-VB2#RJjs8WjwhdgOCHh;RlTLaZqR7O1mX(o_F)|_t`)On}KR>V2>T=a|ka6>Rd{mIx|AQEC z4u>WO26$*m_;m>Y6W(~b8~gv$2SIH;C)%s{4K!AYfEgh1m;zVPpRT#e;^JO(Cfy zuQn#ScS4iNe3SPVpoH6{a9-c`1LZHZ@U&1Em6CvWf~})7y6M(Qe1&eZ)~LO4y~jE-ho@O7V3uNqsZGb4s_G@le~pFud zWZR+fSIjWkf7B2rd-Z%E1P-kPhi1QWFgWYhT|@BM;*avQ|^9iw3)4qiy_>>@uMX2F~W z%b97LOL2a~hS_-$pZlg#ENbrZmS?)Zj_-pTw9D#w7V&X;`B5I+mF)*>3R;Jcx^tOM zLY9O*&+h;v@ZK#OgtfK0it6f_6IE$(s4L4k2A7Yj)@OOJU&fZJ9#sF$1<23hI!*?G z^wH`3TAL7K6xLa1QcR^@SN)IC}^3YwhW!(z{ORa z+W9Hsq@pp=rvMIuK{^_U!1WnTmG$#1;IIN!bP#w!(+0<5%=rEG+QljNvPprpE?d{u zitCToG3ZY^tITpLTz1_5dYDIN7jQA@1Edil>yJp}nEt3Hu?#1nR@Lwa85N(+B=G|Ab!?e`MZQNRAfCht{VGKltn-^Lh&%MxExR zDLab+Snv0j8(~q6HPU(Ul>9e7cV_}DVMjhN;Mi%p^!<2M(*=NE035&%^%0pn2yHJW z$G5IbwQQk6qhwyCq<+2xWalC}M|szq83Ybv2jFiM5Cdr=|DGd1;n zP4r1@6Pw-Gwy?Tdow4%k=Z9~(ABerWm$M0-VRw4sa+~sME8|qYtfdw1c?LDs|Hv{O z5w?H98V=h`$UW$^E6V-Gt(BbWZZ+}E$thRF zO`4Q?N-M6BV^|}eX z0?a#R)jGA0`rx=HB*MYVBo>OEY^#<`NTTH9axQ({oNV{r9d_ zYv}fZpz@DLk36;8 z*wu^sGS6j<43(Ie1;Ne@Lbvm%!9|O#%HSROZ$9YsDkWn5BM85zUy}rO` zc#MSqoKLRaj)j-;3GC-HJ9?h&?X04a3;9VbS-+aRXi9I8EK!2OhVL;!1v3qoE6DEr zy)ahANDbitEq$WM9)^ec`Oxr0!T7USze1OM!)D4(p}ppJ9~PW|(KEs6!aQn4w@5TJ z;G<<#qO1wf*Hv%8T$AUsu@MXe##+qxpXoGBQ8SxpgR{?<{^klX0`aeG$2;c?yr(v! 
zpGQ~-r#z3A-kMTdVTbyrlx*7JpK`~p?Jo!g5R;*nq>Mfbko0#}f_DJSjcW;pIdx{w z&`Q8j5(@IZxIB3Ox=g79y7eH6h7QXdFq;T&W3Jhh)jqCV$i(Aw5hDh{s-Qr*_Liy{ z#NzR2x}K|X#89^S9T?F_MI3n-ub>x_+m(_3s;W5#x!yk{3d>{H?A$^*t7Z<3TiGGJ zWw$-5aS4bawR(sM5D5g8V5Ir)TE*r%Ul+rP&JS(J%-prnP(_>HPDv)_kwb<|VeR}% z-uA_zE|QSQJYdT3iHvg}l6Tflc*`7z1|p+A#ktMLPrs?y=Bj|MrI^2@IPD@9=L}1ysvmB&Ks7Be@jb$sd_xl3#U1 z8vLjY4~o1m{mN-uNo1m&t-@aS3F+^2>UYIiotv9;2LEMiWp!SpSOE9QPo+d!Ts)c0 zn&7(@uq~M===k^;0QeY$5B96z_}DDkyNCmqwb}Y*Qo+y3kEhzO>Fp)8y!eDGX$?H zu1u1;Xz|b)OkqmBv_C5CNkJg9h^Q!}pq@PyWn~tLVq=>Ki0?>4e-0!@P(whIy!p`6 zGTN`VTe+oN(GJTV}O;~-D7LLBZvUaU{Q$$2-M)_X;9=NbOo!#htY z=1qf#>p3BvHc+?K(>6}#TfDj8ft6c9Z!l=9BzPRKEZV|BEFz&lN*HLTJhN?E@&NY1 zFN_Aymi*L@R>DqlxQX4EU*_=kM>;NhJ+Hux@rzeT=Jc34t4PnK7l6WTe{l>Ne8D7S zt^{TI$%N+!i61xKq$%1PFLx;aWncGaEH@8K+$vJ(&vR>jkqJWFW_@t`Fhl_k&_b>H zF+-@k`ZnG{*c346c9f{8L+eBWpv?oWYG&I_OwrJizcPBFN5?RYZZ zb~mr-{&23{;l(o{J0_X5b&(?o^>jR2029KoK_}C65RbMH*^r0)#Xe*0!C)rG76fdP zL5;XyD zR=&~^vW$D2?w%X(iy{ftF5g~wOniHU6EJAaRZTaOzl2ET?H#1 z?IIXEq}_qM#^OG`LmHD6k5jGNID>A*?QsA+t@5hgmB5Ow9`EgnTc*)*1TXmnQg1PT zB_Sjg0iUwjCG{u2galM^MUk_Po6=|RK2~G=uL8CmjU1me%T!9{&Rghd!7&feHIM~@ zCaqnb{NYX_%FN&9oYwesmJYpb-ILO;K{5uiPzIn5mVX^KeNq=;HA@!bqoYDl;dfg; zXX&%Td||^=kFOi7-@EKJxg)m8n@EG~YE6Y>#07a_H6EA*e?A=vFCF^ew|?B%e^(U!}dTLbjyqNtNS7y^UC> z9Bes&0HPYtZc*4noxm$|yHJR3`3asz?h6_In$EnXgOJNEcJ?Wn1ooNiF@Dq|GUvc=3_WB$3- zXc_v;?0HSW>a#L@2s(XxO-;6}yLp{vX)c!<(#h>` zOg1T(Ic`E(i)lTE%IgPEYplnw9(dcoA0=^)nfI=KG+fixQ_KrNxn+-&d3 zNOWik+Db4Sbl*^X@pwQ(lrC#c1*Ns2+jn%)Ncfw-*iNqRXg~5CcER z9h;QOyiqjn$$IEu9qlk!vycU6YRWX!fm&{434efQU2&qhiJckG)x1zEww@RVGGmUd z)LK?;bPg;J5O^ygb~QAdmQ&0H5FtKvaPpVpo97^lDOml%lN z?;W$zULx~jrcC20zpTIIGdj$zVasQiRAICkwlrLrhH2X`IA-BVU%}jttw+EviH^sy z%wT*Lfr2KrYCYL!r&7gUOvoYhax(hi$RMxOzt$0iE|`<9mXm%icx94vtA;bGjQ7V& z@KnZPd7w=B%X~8x?JyX0&0}IP3$3)r2vYWZt?;5iU z_Z9CSm~D@njEKFYJ7Iv}2cDX%o$&WYTk$b#wr?eMSEtzekH~W}gww?nQ z^8=yg_w8-3za92kB5C5YuMA!M#UEZBUk6eNn~Yq$&N`BCDV=d}zX~n9zIJ^fNP60$ zi}d=k{5+O9!_5=K*XHyvzW45Nc&3R6eg6P2W86GD&*QpC`OE3B2}qj4xal0C^JDp1r4o zv#q-MDGVs--aA8)hkF%#j%^|QP_Ey+fY#=`OV^Rc55VhJrH2{7&_4q&REhuvAKxM) zrrumyB~hs0E}e6uSl?FMzl%?J4*EJhTBmGTT`6qPSmDcpo{@(R4x|B)fU%z05Dl8tBmBPAE8MwFDue zO#Pqi=lcJI_f2C+jM`m6wEE|!3ROPms}U?>z{f80H^6oYIf$G ziOiqCr|dOI?f2b+1wPjy%=^%Ksv!4miwo59OwlKF0Wp+>t62(cS?$_30EIoR6&G6Ru{HdB@-EgwB?pe8tTznn2gjcJYP!R;Tvyy zBO+zpby^tOUuK84-2@S@X9x1W=`sp3JqCVG#hutoEucfgsEDinC9u^=Zxp^khma*8FNNOmxFeYYAnE0Jvf6GDdxAbS zt!piIjM!dN!g|r0oSmZ5KOC~#ys2JvM5Pz<#-Dr*BIemlw2nbY7FSvhF!Bv4Yq=jR ze?%k@wo$rpcV}a=k1l6u07Unf4h9{u+ihx<2&TqW8QI!wz(89#uO;UUz?v_Qsg+0S z)^|6U3_NI4X*T4VK)2Un@gQ&`JQUhiCVDr#u2lONPb7H0XsB;I#0|zfS7dZiy!TSX z#7^#t^k~B_pv|gcCYv z4wF?ILvzKP%si11r)P0tdre@xt7smLF6)XYw|aO2(`>67hVuq!wwrjyx&yXg&@w;u ztnIRYV&L^jQFH&hHyV7vJ{W;pX)Zipmqfw)NTqDTy)|wH*6$zZ{<(SYqdQj$=jk+< z_uZ{p=7PfQUCWC(9+p8EC!fb%qj_Ox(&KS>ziWe)X7+)z-AS$5X|D4xov^4eecLGB z4_@bjFz5MLTv^j$9*GUyE>@ZxAw70j%%^A@nm zn)3{|KKDpF)xl~1?0H)(l*U3uAw^`>ypV=wbjgye!9v2;&;y^@vR6PUMv;Kiz_y=z z)phGI8u+^5{$wTL#M6Yfgj>@A{JKyYkrB@=NvuD1hJG;d-Uv%|u7><|yAGDX*2bV= zg#jfvphJE|!Smk2paCM9){>l6Vw|t5ld%>#lw{SYloNY6C~pcU^38!sx$@-3?+(E0 zWF*mA+tAipNoL#-z1qsrR~H16PZfav*gW=ZKdHo)5NMJMd>c)o?2kBgGTf${H@o-F z@GK^JY(80V!GAv8Z0df7?3yA0MAhA2Cbq!1&B{ZE`nK8__XP{$BNWIL$Rl}}clp2A z*!TJ<3(=^Z`XQTBa;Zz1?qKwqXlJ6ne7e*kPU4D*o!%9(L`bW%Vf^$dl=+TAcJOZM z22<2AVIO1ny;AkO&Cn-ebAgS+yi2}6f=SoQE(Avn>izWH)29A{I&0Vii@2ABwESC>o8LUc(WQ*Ugu^NhZoNCZ_pk+W285v+3lD--%O=?HVA9pu8` z`oA{9^cYdL?X5Ng1Gv84FMek+faDA7hq(^>bS#pOodEabyt<*yxWME1ncX-!ympI` zg#4Kn1UXwaT|#Qn#+JvIHN*9VvWx@L=n_{|-`R;a20^qm`-BnqZ+ur zAKxbjon2vT 
zCZi}A3J<8kXiU{VT?~epS)jc|24%yw1xIT0SqX|?reZB9jt7+EJ6eu;rqE* zy32fVYK*6FEPK!t0$|UUno+BalYKI$ybEgSQ?~-+t@2d)Z&u<6u(Y7G_rZQ6vf%9NIEakc@fR zXAXWmKjjsIWFi(LLO3OOP6DtptH!cXihZ+{%L#>#H%;#P^s)|hbV}F_0`%1soToe( z>vM{i#XglK^oi%=Sut6i*+PBpI^mz7vJnCosL=t zqS|tzyvWg`@R^kk)#dhds{dHpq!_|1hMmxNi6hDaI+kE(h+8qmD2~Uhb-RT{HX5ZB%yrF zHht7 zwww38X$f6rBN@gdHnu$bP6ii|gpNv!v-3zVhYit#;I6H|9lcuzDgWcTJ81jjraF?j z4$a{x(LxP_VNst#4=IO=6a#Vs&!QHfMv^l7CrJ3j)iBug`Q%vJMbaBU&5FSXP$R7R zVHy7>U;kU}>P+JAe_}*ELQOa%h6B_3Ml-XjPd-=WRc-1ZVNygxGZ0OkvkOHmI`ae@ z_6|&r^_ckvYO|%jJctIbnsz{pG5?n0V03bNC=eo+{?d$~5bpgu-}@y1!7mmh+Bbw- zu*xUm0$HL>*hL;vh59w!i73(E9cx*{rAX=3+nKxDC}6~!a^peAvR(qD59SF@vb~+- zk^6B%JB)?oQ%L%)Xqq0Zv?g?(#uuzoa_JQ@M_uwy97Pc_1)U3`TuB7_ElJ;K;8gDN zoGqCR$zl73rQBxkY;wOHIL49`SgkJisGsdFZ^By@EW zy`aZ5=j*(}qUb=EHCg!4fROO{cDfdNNPtfJ%@Fi>S|D9VyL*|qWYU<5P_5cc%NuO$ zJ~O18<4+ey&xkqeeXn$z#-UX$bcM@Jd-Wwn(E5`n;1?p}!WrW=er6cwmu}?wlPCPD z@&T1vf8f|-rD&FcTLLN^QpnOmo!aU$)saLK>AjT*8g(a6jNbFkiMeh?c>QgC#wa@F(wuc8d#Ryh^dTVi;1FuJkgNITqei2Xc#s;Is_eRH z8Fj^8k3mRrc-Gh;>1j4P9-c@)fV&&r%KqLhIl(DbuIwQJR!ebHO<)TX2a6s!jrdP6 zX@E8{c}zA3C}9#*z=9OPep(MNUkU42rrSpANM0`XEiP)}XyOZa{+ih3)j`puRS8qW zKs+y~o)7z!uTUe>KZx=*er09xKBF|B`sP3{wKtxR04E zxL$xh)SShaV6~L&s?LS@5BlL!m>9y8pP3ft$7N8r&{-;F%Qb8QeNJrb2fEsLA}YtJ z7l`-Khp0hmPbuVi7S&3W>*An@q7hjnPJVySgLJvZrByRVNYfcfgoVe1hkwS5;eSkC zHf86Za=_1tGtO>vD}9a};QU*}fIUvim!^CCxo`wl*O&ks{8$>lvWIRD$7VSBP25an~ z6hvI~b!o>Ce{H|~epvA(a$zw_ji-@>u^-AAwQmVeALoeF9Q7juHg?5iI7|!HNz-)sI8PoL`Pk5eN#VXC@s~OV*C@H<1_d6z{{b#O zV1-QJT*HQiw7h2wKd?R0kwHZ2Yut{j6s9U!g;p$X6Gr({Dkfxzo_L*8($Qqkz4W=p zB|b0q!oH}si!tCT%NlmS+=_zu2Fqvww16UM)o>%ud6lSBTziI_80-~Egvy#Y_IP4B zf~L^M^h2vk8u;!5y6u%&ZRK1xK})J!?&eTki+!haimgc$tKA1Dugiv&qx^dqmmI3q zUnJm1*pCQM982lmPayx~4mg~fIht0Nbri0l;TERjhvG>2YJU2`R4;qNgt>y7MjsO1*W!qau%=!mX z+uKi^E%l8@%tFzo*L@MLQ}KL$?rmRFH(9ouQj!T^By|;f4@vrHTFjFet2#A{gjpPo zd|$p_4U2B%IDHrGIkmakr=}s%H)WvQw!Hf^)f0e&3+kSrF+%}-lf^Zk4!V?)%GA8G zw0d(U+#~fd-c0Mu8Xh(p>Qq3^brxzQxmaDY?l=!eR7B2CxX!+o5g!dVjecc9%lp4M z1%vj@UnLdcFrH0Yp?as${AQVye{R6a#tyEa1Dj$Akh7Y8L*#0cvay%2@gzQ?%GZ^FuMP93bdksti%Lk^Bpl($WK4 zAIp~1L4EOOb7r{YVR1K#ieVcN%>Io-dt)R9rAdUa%|YEv;dj!(x5r}^G{UFz0sRu| z<6^W9J*GknQF*-@!Nb_*>Q*_`h7^k|OJj_WflT63&d`NiG31B5&uq##x zIKG-K5{BY1DaLgbzFYet<1(h<)wUIwBWze;k5fy06>8O}q5(W}E*OqTkNVsTNevAO z6?ekp|AdM+6EdyX`RaEE$MY(?u&7Qxp0C6JW6DMn*S?h1!8#jh6DM8Y25&PX-QSHb zuQs?3@-lHbzI^sl1Wq_|^pth*Ev>t1ZyT=t8@Ts;wC z$vk5eNNyg`@c8)LJ!8zhPCtHT7{$U;*-W%K3!4{I9|&zu*H`B6zOJqPXx;Ub1vEcE zp=7mjo8E0>mq5?uvsv#yrIW@_45OrSs22Zvw|UFS`y^|beXkcXhAe9)0(1R@9s2nY}%Bx))v&rud-yh%Ppbe}bp z6}gtu+oz)}c)C0!6pZL624r|XCt9vIC7(E`jvNgxywrU1x{s33?-mq*xZ7F9==Y;; z(G!gH@xVr^+cLsOo zz9D;`{lDj&eQ&S%Fl*MDce++jS9jO1>gmrur-!Kz2z*|kR$YnXJ!o7=j?pk~;B=k) z!OnOAK(#%b(9~vYR-BUNi7>GEhtuJWhl2(3dmSy5EB`dLs| zGJ{q^%^)t=m{Q_mIpL3yLN5YV$Uj8T{pscClxtG^X=f)FcF7FF=es4DA-Rm7=;b_9 z&y4c9ThfNzRODDP7DwdCfAp1-gV)r$r3S0~cq!z;RITwCs)6Q29pj{3?wT){sY#B` zf)xGjTaE2()e%Jsnn(vG2k?q-^y_ooHl9VL0mm-Dz!=cp`9?bxN61o}6*qy-pSy{8 z<=A;syVeFh_JuM;|3;V}>@S~b2Jv*j0zY~Bn2`a~>6xX2PQ;A3G?42T z{aUDk29Mt-GkZqjuk31QRHM^qJIe`HFf+@ZwrTA5cCI4uJ38-B7x zN`5nUbuXPqepl&*r3HwT|5sbuv^Fp*@|d_=OqLqz;mXBO!nhLyz%o$dJ)lmyl5;qx zt3JZR#^zkemI6zTn|UGj8!Yl@4)U}7hUzxs+9^j#`}eZ}em^sMiFThM0=3rvv}T28 z+a^h+ZMz&NiTE#0`oP^X8LWue9TPRMyDRnbvgQEZNVweQ%R9 z->fYxj7(%>ZH_TC{14ds@h_OGA^D#KA4Z&P7sA)L`95Sca(0WoOvtaHJk;T?)zYf_ z{?MB8w(&5pHvHu0=m7i=woRD;UK+51s0`m`l2oj?SXs(m3jGMe_n!akNHaV*_Z&(E zik}_?f;!757k+@s)#o>o{_ioOer|M_@29Y+Mi;u(>1HWG^Z>QnZ8I=xUzw{cgn#k< zi%f>_b|$|@vV(i`XgI8iHPv>-5entIfXAKN`ki2E2 z9POL?Q9q%QR55tpgb*ixugG)fcT%5Gt?)Sl5>~!Koii6_9!&PW_}xyUEPC$rF^XXF 
zZicg$yL1>A-{!9!6;~b9T~t5j@%;1jcP_53K(5N_u0B@b-!848$?Yhu7jS7+rkF#1 zWL(ZEgSO-FAK&B`6kwpEzj1?sNd?}Dkq(zMmCf!^M$^#K@6r}H`TE}Ve`8zz^E)%RW>xT_otj$3-U zYcv)&5_vLQQ0$l3w}b$@vx$r^x94#-PMCuv`th@D$720eFTQ?i#&Sq&2jm)VP))$8 zDWPHknewQ_O^5q(9nSv#CN^H*4=88SzrQFlqC*N$FX-7ydNk}iD7^g94{-^y)4^cq zg{00lDT%$Yp=Yw4?1FpwMjGZro|}jTHDA)=nhI=S7wvOSQBmFLX}itaHH9Hdm#F?K z)o~4<=+?F8Km<2Czm1K4-urknCOE_H@+`&jRcXA>Po$^&oC*zK=>jZ0CH@S5M|9dS zt8}s2r*$~7zabw%-i8%^2HcK$_%E=8g|nRMB+jO_>fLgw3L&4``XO+IMHL>Yn^b{X z)ao8~^h8}tX6gR|U;(4@q_+PUWw10G^RKRN84(-CAV1~4dV)in{9Y=!REtDJvKZrH zBW8l@R|^~a=mw|3|HPW{o0MisD|t9HJS%?~Wv@&b;%}Uc<{tQQ6mzO0w1D8m3F?nt*psr+4t2 zM}S-n9xZudn%$AXS?(e8l5yTTs}s3ontV&s!a~U+#MIctRP^kvuK_d4j;XT&VgMsDX(uu7~Y-3 zZ3>?0Q+q!%*5q&$4L=1rQoyJ!4nd{r$U6Oa%WBaKD}{!>Qeq^Kg@mX%Wxk`yUe)g~HW8vWcsVb6?16*q&5 ziQL1QaWM27hPe+WY-LV$Hizi@Nd1LBpc>8QC&l8KSxGIF{6H$cpXi(cD;2KlEEwE zjCc&1qPtiq6YipmXTz;?_*WV^d?d@dWcNFoJkWh1@fD969FcW?xVgCIPSpOuCLm+g&6g}egE5(PiQGOeg}HluYasCBYsxotrafCGCi)%#mvY(5 zIYKZ4EWa4SHqNwkJR@Kq4_ha}qiG_uOvm9Xn~e=_(kPgI@I24^xCAZIUv=Fo57>MO z#eOakf+{jG)26>qyQL^WLI3i&H3gIEl`D6$NJ@6G{F@1mhbdd$yl4P?;6aX-QogU{ zW*4XyK;_pww#YW=k=Rt$^r?9(y4uX4=nWs#o0bY;%`?{RoRU3a{GGqA?r;-Cy8L<_ zAeOtPVX3g&3N{moc;o9Qs@RmoV<6lPvE$!%he@ih?A_> z_C}&$W0>nP&!QRU6m-=dXn$|MnsyRJ>~X!Y3gTJet&6E7@VlDM7b{rZ3;~wX4|xF* z*MTp_Ho0OZbIk*D!4v1l`J9Dckd)*@>TWug`>Jd#c^jZGnC>TMrB2|wkfuf54d3}4 zPeF8^ue1jJWs+~+Ahf%wOa?bpYyihSqgiVx!_HJmqpBP1Os=OE7V9M=_ft1BLz&(tzJv_)ILFbm7&=Z%V4^<>}}Q;b>5MzTH{>E_(>L>a@6Ai z-j5{I3VHEIKzU-3+5kGqq>ZFw<&KB`?fs0cPpUsCTrL&`&(p{A!R*wJ0gO^t`gcsm`Q#~&3qvf(z{Grm9y zP&UU|vl&l@Lwwwbgi3)rr~TX>&{_iB7eGEok$h6xf01f_(St}$K{$Si_BR90t0j%LWJ zm90zfG^NnqzL`+ap54R5b2}1e7GME@y70lMn4R(i({=umrLPNLe+6`R*Mb--v2Oh`NNSYz85E;gx$tRTaHy%ay|A? zZXmZiOGmBF)RvbO%g|-=*TCb8!fBwHj6(}K z*a3ThJH`-?q?IzT3Qc-abdp(Dbv*bsuK)Z%NXbmtrIWiKov@7o>AVoRz3QCH>*!dsP+tV-(QgecgdCo02N~h?4FJZ1W$e6{WRBm>sKT3zH`OlqJi(f zIOWLSoRZ@CU!1Zg`jJy&DE-ALsZjeAkn$#g%_BghUp~{3k_h?{&^dy;5I`fEG(06r z{UQ#%`8RQqK3jUHJ108IsRUW(8Az;4^o7i5Q#pDElp-R291~UlMOgaRZoRJX{Zol@P@UPAeTvt3u?DxIf=B^EfN*J@>Q|4FKL?S;{WU8~rl6$8(tfp_{9&7)*U-U|~tT0_#1JZM<|G zvZvp6ta#f=5ScvB-vCi_nfyTc)}I?rmVrH2rH{~X7OWD-`KwU4k1vVsNOuaSHs}}m z+)q-wZiZ2?#&TO7($AoDTX4Miz*Ue9FtaQC-J)h9vHh0l$$`P3gLmjVI~^b;n4|ph zS-Wx|Cp$g6rcGEe1^JUi2;vvz7iy9zq1^AW`~|!9U5ZASOdZ#LWvLdvwk4@&Pi!HV z)u)x6<${c0>O<7_1UsfbXR-K9&Q<=ZfX%D08mTlDi;(^h5u5IPt)A1N`(oy;)!g|~ zqMCrmDYGcP=76uh_Tq>1clCnK`9%Yq+wGvY3lOh0K>qc2 z>oi;HZR}df5%S$G^}hFkZW6kM+DUoK<|bC&$9_$djMNjWyz>H0G!GFeNxk=wwD^AC zi}eKmg1`QJxmd-T_-=A^nUoA4zcrQh?DX_}s;fqz?PzZZG*)+h2B%c~%LOU)=ruj& zrDh9fd5@f=fB@XcQ}DZens*EWSfSY-ojkXR1JxGid$cDF%NsnrPkDC2_xb&?igeLQ zv3fj<1$)aRh0n*vPQjVi5jJR)5tTo(0S<)o29FZcK%B_1tyssv*aAp?(c^usjQT1XhjbA-T#1DU>#4Q znc;N7n|COON62I3*TV+%4Z9a&Dpy15XLwgh8{%-R2};_mT)&t^^|{=kYxkQ`{#~0Ct8xeah;3shNoRU^-zVBv==iv6e-W0B+@&{HSuvJym zJ9^n+jU{NWbeVWIVhCwW0IrR-c~0=*7rdz)s9^>@-yv83f|91Sc)fbmMiH>d^UjZH z{b7&#X(w>s_;KKi#QVy7!t`YCO>v0KyOu*Ur<+gFQ>8l$WZYaTMm?CL;vXztv3vdI zv&IQa$bggPN0Q6^XLjS%_};~I*xg?&M}4U;^j+Lv@V<1hoGz`d?(E67146b}YtiyR zX~(7Df^lUE9V1ggUQ=1tzQ8?bu`=k^>_$R<-gTDg;HSNHzWc7I1d_|3@zJUbYRR7F zgXdj_ao8Y@t4+39UbAWds!Zlnn`~-rP?7ffSQ?K!Dn03Z84^D`DvTmBj1b{bnW37l@%~qNrA?)4BjMmGctD;#bUn* z911_KpS#r}+h_t6K<_O$HnWP+o{GUge_rP5I{TswEsST;r}0Fx@{>7wGE}R zw4``ZvoJ`p&?1E98eh6@qaS;2=>tZnE=LsPFZiMlwc1ucoI5UO?aj-?ZEW-?DvvHf)lX;R3(T?*hEGB0|M3N*nE6SWECzG zaj(a<&bFZ#SHvc9*%nx6@5jH5hS{Z&@&~M}o+=Ufqr-}S>Z>y@1Y9DT((gM0dI5Qa zxlvpwRjJ}x7R8y`;wNh$-^1@y#jlF>bed2kQ}N@^_g3hcdFMKEdSc3=I=SNFQ+0`b zj4BD^Qj-$Bop^BGx`uD-kjYF~6A9pT!!b+g+2;fJIpQ;m{=EeKhU%sCGm%?xrI^q@ zR%Z~6l(USbAq_;jmPb+MP$25R0b$7XoqNas2MGJaAGDv=s4i}7fG!jT(LG)aY7*oZ 
z+QeCm`!HQPsf|i2W3}=%yZB5#p`DSiuw-DOw|o0!J8zgZ_3k5(iu3!=<)|Lb7B#2; z>nHCk335jiAPWn!-~)Vtw^PExj*)@hlI#{6Mf~^iaSWtj`P=b!3TV zEL<@bw~(HNH0&P{^6e*+IIt6WY|Dc@&E@^dMcGzo^zXM1 zv13gr?RW3ws1`aYZYq3Dh)H}0EH4wWUy+N zE7fNQVlSu8R`?zRC$?m(D1PzA_BNNt5;~|Y@L3y8B)vUnXhRT_X$vTTypl-sdk8uq z%zQ{5M`^t6J;1j3eE-qcsVcMf+fK{z_p9prqexRizQwsCU%N`h*9ksQ{RKbQBx}7o zph}Z3b;>b2xAt4<%b1qnNn7vRX$iJgM#0tA(?$u_{fA2t0aFa`F8YQ&reeWzx23Tp z`@E_1!gwN|Q_uxg#gs`S+R47(O&t|h%U-B~%2U^|Ii8W;o$Tx{D|%{gj#PsBzaU*i z%Lag*WlhW3Z5D)EYq;LH79yCpmrV_;%%j&hWx^qp$INND&`85{EJU z>0d}WPavHOXCkZdZXvI4p_L>rJ*}}@a;o}wej~NQQmOD%2)}zFfz<1T7H2jr$}S|q zhc6isMbg`I*~6vcZ4p*c3q1v_1e#PKGIEm_Wts1ah9#JE;}Y4q^;6slo&%x^p1^fu z7Rw*I`@TqONXALkuGblHrB&*~>2&$EZrE}SC)&T);7w#zBzP#?4 zn)z*Z9Vf$K8=>ksto;3v*GkYLgT|em`*E>fHOX6@^3AW;`XateDD?jliT~}?r2WS}YQ?!+cvCtCVf)}iV2|T9` zTAffxIp<;>#kjukrtkIfO$S|*-HpY#WX}3v)TXkT5X1dl5DL?4l6U!Lj7^*q=kxZ; z6Mnn(8yBD{oKfFPkp5*R%iX+loge1AmB9eX?SLD<`?wdp2Ggh*%#_6YG$CbGG*2D-6xUm z{d_Hj7$W3xK{lw(Ehs=l@P~HB>;9DsF#Y6X#3{opqn2d)g=9-eTBU25a;+DzKF`;> z+5@G=fd27hvHsAB@(H@x<;@nVNF(oA-bbTwcRrbFp(8ofQ?hAIko*yjN9pAi)q^tr z&DPPgA&Nd!5%=N{t#7Bz`uc*kM`tczRCMsdVI3R@`#q`45bt8#R8Z5D=W;3d-4hY` z$l7VT$s)vk?@QU7+3?<~hwXu~h(%)l(;ajsa0FTLT@x{uvw9GcWR2&1Z6lBE;0fra zPa=$sn6KvM8xK0TN)M_j1dZHFQ?a=-M%v~bquZ}NC^A4_c5SR&ZpxcEUP+Q4!X#}k ziiz#_uvxs%7ozI;dED@^Boyx60R+@B`(DiBxy$a1er<9YVtA0oFSB;+6)4X9=~vGX zM*Zz%w~;uBC=3xtG_SSb_&Dqf^x?4eS%JwfT_9ko!}uz{sv-}c47|5d_2na?xM?d# zX@p&>lTNfzB>|#@{zOJ_9UhS0;p(n}g*B|_Sg=9@Xo380%~KH>zVmi=yK(Q&s2sly zY^3nH1JWVIIyF%bIX)FTjl?c^`^sq(Z9-T};IQg}69@0o^~0~5j;#>wjngctPlic$ z+O&4OF5kNy9YdcfDTyff$_h+6pE}Gg6C+N2OY)1|;qb?(+$aM|%8x2l}+ z#m*L6TB&ahiGTkV%xk9xV^wJD5F3J&jGNOAk%hGB8bu5=o>XtQ28zPT5Rk>Bn0``} zI!aCWVd3^Z3%xTl>l(*h(CY?q5liva)^Ezoh#1E>z*_>S@7>*i`USPSUNGu3*jZ8+ zF0-FaS@eR}tYR;7_!1{-M}xu%hU|q~^0@3Zh_;`={W&FmDBVt5?dMFr-Y{tsmL9X* zF6_VGZ6Iz?^I=zKca;&!rr?;XbXU4>Tqq*`Ii$!p%csfOJk%v1F!WEU^rcCVzZkac zJQX1O(rN}>pNTBbfV9yZqMW96N1@Y?OO>9tns0asuAb+&r?D8WIZD;=Bh+;mNl3CD z>em@h!GSPnK@9}hRrnffd*@{tM>kCm%iH~;n^0Z|DMWlMOym$pM(px_CG0?GOz`;B znX5|(!)3t!{`_FmE@>S>7x19XnWPQM)dDdu9TSdC$|v}_p*aGHWtWvG?QM~d;|*gl zcg;31oDsPgTvqP}yn3IGiVm=uE!c__WM_`=WzO%~T z_#e;b8YS;+BErVox~n;4YHnge)}o;@eh{DPvLoVD$s>|pba=QE84K>db;EUViBUh z?nQE=LJ3PSnF+bg=8b3N%`Oky(3Oz$!tEev$k{wmC;Hls9?zQOV zo46FR;x~!4<5p+K$B8`7X7a0MBw+f!v-l$ZwU+$vmsF%Nx5v5Tv7<2ty9mdNJSZyYP@#5u~BuE zIP9Zy)(=rfB%!YsNueP*^oN<)3Ul9JMOressUPD(wS=N%gcvSkm3$oxza<4>Wq_|(*-T3@w= z(09h-Ba&2YKL*4w8Hen2S$o7zIUTWf9C1Ua?E7@C=5`A}350t-+dzzZ&lc_CyE6IW z(UD?r;>XGIb!^fsnaX{pwFg0G)sev%3VB(`{a$ES<$hIC*ycFGftl>?ku^Ld4I=Bc z-aB`sm2|J*IBe)C;x}Q!MIzNA%5UQxqK?&3Bo!LsxS)7G@$vg)D$+Q&+dWhyXk=A$ zdeC0KtW=NdoJAc!`a{*S5+b=w)~>-{Lh}qR9X@h6d}5-`8kJt6YcG+ zr0SFg(bd$x*)*Zq5ZS+FV!X+}y{9Arm#8p?N?CSackKkvYX`WiVP@WLi+3)}_27|! 
zd&#(!3L?SeQm(G9EG#Tz`@?L6K0HSI(?Y2{mHf3|7g>JjFr*auX4tk}86yV%vJXuw zEGm+N+%I!=Xh77;rwOpI&OvTH$jHcTK`7?ik4SAx=Irc@goI>lS#cD4hVt2~);n1i zlKrGbad>!mKDhkXKvGn+-ylIG4-lg`M^0+wH$U+_IDA7K)@#CDa}(eMZ4c{F%$FOj z)Q&r-l#`*wMmcz5)eETU{BGYtThhs@jZXw24|z4D@;i2xsxw9U2MYC=U&h>RxEMj6 zJ~w$Qed+^c-ok6ees_~?JeIsY8n-JE6>TXO#E@h8n@mHQr$c`y-JGK62AA`4TH3uk zs#V7VF5Q+5#v}j6m^xGKw~jV2YHGaoV~<%eD~+977IwO^Bf*OqWOtZk?35QuD6&dZhII^QE@ z{;iO4&C}&goYJ=WT7|z!=fPEWR8|aw+hs~YNPWcYkG(L>qE7V(bvHn{4X~-j_v_KG_1rgj)v|H*!1Hr>|J7 zo2#1RYcsf259C3PwkBk6@&~@4idYKV4d2YlRdg5r;T*9B#i%QB@Yb*zmbF2;33cRP zr@rr8uiz0c))y%r&1i0P&BC9^C+i~AyLH|!yIKtxbMMl*y=Ct-)7tm5`%*dDN(%Gf ztvqK{?@%);K<;+RMCxj|tXP^M@jQ1H4_f3hrI=QJ8@6C@KK{v?oaEiCm~V)6!eCl6 z?&I^PYW6)u_cGPi4!|78T$@(*NT(+xPqvNA{7%aK@0Gd6r@#tY7mcr%QZ&Zits%K6 z(?cM8A{aPp%MRwkPRF>rQ1T=#`)Tc&Wk$-OHQG5Gj@0P-67p8x;= diff --git a/contributing-docs/quick-start-ide/images/pycharm_add_env_variable.png b/contributing-docs/quick-start-ide/images/pycharm_add_env_variable.png deleted file mode 100644 index f408372211400e908c414c3446e796abb6faabc8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 86158 zcmaI6b8u%()HV7GC$=%MZQHhOn-kl1Cbn&7V%xTD=jM6e``y33x?Sf~o$B7F4|@0R zz1He*d08=7C@d%d001i?F02RufN}x=z`GE@KQ#=dO6&jt+P#Oey0fBz8@|1xovDSj z3BL0mdlP&UcMDShzFg7%9+ ze6e5v{psk6m9y#rKcN}H>q*Zk*nT#P+6uUSKM%W6qQ0jR{kXe?OZ8d%&0BN5J+|v4 z`sF=5Mf>f@Rdem6%45dowroV2|Ei+EwsK{J0f!UBy)nm{?CNk%o?dXjQ})#}Yxk|E9Tb_t!cC5Ly_j1^($1Aa(xHjb+&2u-Ez$ok28Kn?!Q zf$mnbe)U*fq5X#g?lI<{_=}!z`u62VBAwHea#&`7#Mx$|e$AL6)>x*GBMPL$adL|@ z$j9xYWY4FfdJ-k3G}}Zn_Ju3AP}G6Xf@-dgzR4Djr#^kc_` z^=&u17!DQW*~CWZ$uCTMiq}D6Zx{kkC9lVJ)?*a6*vC<`nn``+aB+fAL0ioeVyYqG z&m{G-yllY;ilR)(a?+*+rxAvdOs7%cHeJDRl4NbkvZ7>Np=g#xZK8)lV;SEa z6PQI;J@FhuXFikbY#Lg7ZGV`aobwDVK{nmyIEm6EbXVw)S1D2!UFWuKjvc-9HXNiH zS~y##paz>XxH=aFEp{hr+oWh`Vlj7HgrbwKJ2!${P4AmHe2#V?Th>E^g8znlG14}1 zit-puDRVmiy?E=i-3`A%8mD==(=B!|n3KNO&_FJ~N0sNV5O)%x^V!N4 z`nCkMm`4@`m^P7WcEkLFa;an|V6cQay(kk5>z2;9+DTN>GbIS8j#di8;DslKEo9y? 
zHzQ1$sGAZikJ(int0DllTor-r0_-CeN zk;a*J*F1;ofysU2PLPF<5-i*4mv@{RW>@u$kq%MO;&QRN%@*qdZtQecLH}P<<+FMJ zFiJ(a=o}4M;trm!%^j_x?LE!{N};+%2&{~Nq&y8J(~snU7~-zf>c05c%#)n(vdOQH#8TAo${zyV;o%RT0-JH zncBtdp=*qiaJDt!(OH9*5EXkbgoBx(6;Po5+HcI~rArEn(lbT`N(g7_- z)K3W&3?|e+OJE6^KZzHlSYTbTI)QKSxR`j*^~us?!l}JNixO)V0gD4?5^jwk$R(`bKX~{lQVWMm$YJ{FF{<@P; zE0`1R#?jM0`eIhvduS8iUvza%GVcjUeE{fiwywaKsDUFzf7y-GK#}aJu^Yt^p1$0h zx)$0_6Y$+h6j3zI2FPM|{*6fMfMA}0WyiyeRUEaf1>eUY=HYduyE^P{J0pfa zJPiUhcCT$U0jo7}XLc!^GorcFgg=7W2-9$Rm6?_8Zev)zR)r(T3s=+FzdjODkf9;M zO4eGgiYurT3{Dg*I`?7m_mW;loK09*`_xQMGf)%dqt5cMNt&RxozWIz&bd$9#rcrn zwIbp_6uBpq9*|xclRBx+9-ZNIWje>c5+gx$80;IIo&9;sb zbfH|Ad#%CI%{G%ABNquvd_HdjWo2{2=~|L$5;zU`_kzz3zXdH;AlTuO0FUa(luR!b zxBlS5eZrOSIM_pN>Jq#05n|Pb>;`KnMYxjpjxZ-uwXq%pxl^#hxe{~QZku7{@XC;) zgn#4#+kfY!C8CuQfS3;z;@rAnMcYEiA;drBqiei+eI&Ico`_wtj6PfMf%7aOv__5B6fhXaAE|jVj{vPuUXmIP={roqTNn|2 z1CR2)^jOamp)B8VfxO>jp;l%C-vh0;gUH^VWa+$`^J<$c$l5UR96IbZGjFVHf4}6Oa%*{T z5Ic&sg>;AgV{~ls5O$j^6}BtePY=)iyxjTWya}VGV4w3y3L|FwMEfeMvFHt3z=4gS zX~T=<;2K-eUm=yNhyfV$s1}qpTLt4MUqR0!veTD1 z0vvvaOXu{0YPeDHpi#|nKGCpk=36raIO8f~!hi}9fmmvu*qAE7`onh(3=ZxdqL&M_ zX+jTT;`1Dw&~u-7_eB{x1F4MELM(u1$f95j7@5wMW()iAHytQk~l>$d{d}R+zQ8_`k7IOgQiW^amyGEvhrwE9mw+d_Npon6%`R$sEXa7w?G@}2@ znDF3NJ2N>j;Xrl&XemTooua4=6b53_P^}YTgk>c$7G82g1u>x+p|D5Tuf{emdbSGH zySU+uS40S5j#PZ*4Q%zWDvV#4J62ji>4jdo4oqOdTZyPd2=1VUTlCx@W8gtFp+KiA zVB{dI#60FO<{ecwN+Qs~jWf?lEDYk>+z1{IL5T>WKKP9xpzIBO9KnqyUjBz+uo#+( z2%rVE{r>cRCmfI!+jF!Z2h^>Q$$bo99kOpblC3NbF78PS#GW%s!iPI1eU94}^S{B+Bt~ZVz23U~38Aq4qK4CxLAYg@4=DpAl zr3V>>F|fPh@sm4#tu|H&>yA`oj^-cYG{l7yO9Yno!Ql8?DcG5X%J83nahF!@lHSqU zQ6vNDMW;mc+mACN=|tWMH!TZY{8FALKBeDR+3hO;0VmqO*G>)qe!FQurd(TaaY&>% zThvjYnHX30L!~VQD;6=FDTbZYxf||Yp73kw0E>6U@34Ifxmj17pR=f}5LL z;}_2z=}O)epzJRQYK3p2gBwxc_&r2!l9BSl(dPCH;I^Ta3E!vy)t^?+N_pY8ID_f| zF#DpU7Cj%%#{Tn_u5HTz;K>QHf;M|Hs0j6-bS4Y#M|3E=P& zRN^KCU+`BWng0+ThwCt%F0tDb;4($vKtwKXC$%NV4fowp!bahvmv8LO>skY4{->8s2dptJ#+wlgWK4*uH?BKAP+aiuSl%v2{ zi5td~6rlWoCF|AxjHQ`K%LC?T!{3H5KICK$S`}?gLy)J3Y<}!&$e9d3WkMEpHdRPz zgruFdibU9(?l0baY8-yYyrT_IrMgCcd-h&9P)gy{cBZBClZLs6BF-($1|B>sm@_oqO(@rN^urCcmeP{zRxqqR#hID*tAA zz2M#LW#xwR`+QYzDmfP_had-w!Yl2>IVzPdK)y(@=fgIS5hk|?E7S%0R`@JOsvZwR zQXM6eB7 z`JAbMl$8lzX)f%)=m+EY;3y0s|EG)Ks{D7?2!gmlsb;TYRZ6NR;|&E39VRb7ZYB9( zkI_>XDzW6i*=ZG|yM0wbld+*sJilE2%z#AfAdmVV%OJ{x(Cc`i3Ok{30&ryYT2s)I z67LHfM=-xKDPS!X87--LXA1=jG$=htk^)p#3Cnm}%+(ml@gpy_k^toiz{JT_Q3*4l z@?ZGG_+smBv@#lIMS~cq@vM_*EM`Y1eKiMo3VVIMR!$^)#a? 
zGbJOI<9@EntQT;T@Nf{S$#{S=Xj;upEb}U(EWkg(15&Gn2m(q#Z0bAHGh{4}2zplI(%pBXdLYeB90?&o?v&iA zrjaH4)yA0#=Fw`SzNw>@QUw-<-w9zccq(*H{t~Q%2+355kKA=ci2&r+R#tcO9j7y+ z6Q|}GEtFva<`2&OmbVV##%w_}p7KDyc|@_wjoRgX)Fgf-BO|<$Z-iu&Jc!-D#2B$6 z6rkI-zKfpv577fL&(wO@@<)K36q%H&!F=FIIw6Px^K*ethlZ5X4l56t~ok&Frg6i*Kse zR%*SYplhnUmm?E5!BKPzJ!JRXgXDAr{z&VF0fto){CiXx=AJb369xZDL8oWKQ4IF6 zF}cPXX?*UWHOH3g?@62N>*ee6j-rJ&a%fl66L`{f6I|;VahBJq@dynu$p1=X#^KNV z8{Z7O-;!Kf#p3QBP6ajw$>4$2t$2PrflmCMmX?xpWp5LG-dqR3ct3y7Nyt1~d`A=Q zQfQ-7-r>Yo#WLLdy`h;8J$Pmjs%)Vu)?%jIUyB#$!?u@tTirMlwjE=(#y0=K*1)_i zbH6!?N?m0#&h_ph}#LfFi$^+ zhZD=r#x4k`*L3m>s24`SJSqVPVuIWPMQQ6E;spkfHOdS04)yk1oz-cb1^Cm$jvUg> zM#|>?z43I{>(oVr6}|Q@-y9uCHCSBJb23noSd{>is=WlG;gBRiOnqd{jk%T|Aj}Ji(r>sxw0g_xX20vbLfr!&$-JtujG1)5;22SHG*>?vE zvV)1qq!)`FXCym*^_=*Aw{6Gzbq#N)Lf50*mF!k5Hcftc&uc6sD|`^)on1Zux_c!o zp=Pi#JjJ(a_H`M~cg^&@BmBWmnxduMWiRCevwHixs0U#uEX)G2fFGBT{;8Tb&((Mq zn;T+Nl)HJUw*Lhs)@)3;CUE0#M8&hh;HS3u7&yMGW>e ziuB7(74}5S;%{5*uH=ZgR}c=~{hdtBP!zcDH0#cVg8rk5#p=$(+u!qNf{%nEC)|!z z`7}>VXXzzuP&BTEk`yZ_P8%eR!n-BUq!V;1qS)F-g&Ob0pwoU8&MTm4(DNky$s~D5 z3&J|eE`cEM=UvTIUl;>dACU`~H$YCLtE-G%Jt)4xTjB7m`VebzTd>yyYvS*Hu{bHa zeQL*nh0diShDHxeh<*_>1=3Qx2cqG;4Tsg&0aG*P1BBrypN@(Tvhe{ps4!Em7|=LR zWo|N-Yph;|e*0$n&Y+%Fk&iOY1^ymFA*TjH@?NP5xnQt7RpXhy z<0(3`g}9C5Qa!hj~oipeNJAR{lm*0J|I&m-Y=k?=unb2_We@s`zOOC?(2L$t?4_Qq`gh;YU zW5~-zU%#Tpp^j;$EZfHZJ|ED*95yDKRL|px0!OL`Kmak8ljp%eD`>JPN*9{*UG3sI zeh{`9J0|yz)cHXvi^t%7!27sa9hH!77RuwCc-39TD&V(5Gg~%)dRwnYBjew|Xj{G|n8-PD&axyP|d+@Oy5N0orI7uRqC0HBfoJAnXyGckTD ze>qFYi2OQ$hJj=u1PvYJ2LSK^62bz??rRsDZtBPq*nk^1wrEfY1dnSS{yKm1h(m5F z5A|uKi_a-Xx2G*0o^`Y9!wXo?YeH8rRDpQiq=cD-xaRjyL}8RLztYm{>)Tt%(UjHI zQPZvTRgJx^E>FoK#WLk`Wy;6z1ubTod1lxg4O7$Nsw$cVU}piZf#}j@D4mwxl;7Y3 z@EPFT=t}+gMccd{Hy43w>0pi#$9x_RZlWvoGKe$A zCpiSR8#SmNvm7mKbnED5Slp)0abR!~{1ADRp4s?Zu1);aFEq-~0QQ9yu1;bCxreMqQZ=Zsp+36T9YrD49i{PW+-u_|!d)AX(qj}M{VW(GTLs~(xfs4@QgiXy6pKZM}{ zN0=HJw-SGRR7ItQB`5mDMhm{L&hj6_lPy%{0*5u2=Zmsqd4VMB#K~1>xQVQgaRqhB z$0A}U+)*)os%&H91A%EYng|>{y`ApMyJDS^_~|*;)^J1whQ{ADc2+jFRXf_(lZLmt z=?Yh`x=KEOlgmFF&d|KUE_pyseHH5!s|8MZ*njkXIM1ZumJ^dweig+4Nk)0e8D z8=?L<6;@pm=g&ysR@^jVBoL~MifPk?G>VJN+ka?wSGD<-h#3CoO)LSYOOMwEuqyyb zg@f)xY};ZTUc^~1uee>ZL8|gXu|aRDH{mi|8H^=ASpM;boUCuV;j*;hcQ)LXOr*V$ zueE=YcdIn-zo99*JxhFgnxB|w3C%@(EFZ=RJp<2ueaNHy4@_0NDiaMAJdkQLkKkZ2yrba#HJIf zds*eYk^D@&J$wy5)QHYP&-u# zgXEhi4ji#|3Xttli+DY85~-C>(^gm3G5qLhy}r+M-0mg8?+foWp7r0TU%dyke>MjQ&vmLpDnJICT^v#U>s6{*+ zJr;-EWQK1>p+d*JD|OB;D_Ls6+*WFAWJ;z!mKXbE6nK@T5x!_tc@jc;zHYi4qeak! zZgih|D_KFnF=a&r@wiUj+jpqLx}%h-s6i>=PPlY*bTypSXn0#-N^J+;X*jnP>ukS= zx+vxt>*^Zjh#KA7L8Vesiw?Bd+gBh@E?UEc&Rw_1>6#D)hvzoOe@aD67f@hnd_ln= z{BtFjJC|;y(eduCaWP5!ymn?2EWyJk@?j75r^|)nTbUImPrC5pK=h-? 
zp#IzXZ@zMBY9u{#9Tnl=dYI6%P>`O9{6zVx>9X9Tv1SSQPXkCi*@ugam|Y9ouxG`*73;IvG96 zpchJH+%gH`q~%U@HoMMlop2r7wp7fIi7Vl1B-@{^bm3_B-0nzxo}VW!WH^0K@Fr*< z&5#i0C4tWkOcZv4(^0<8&4yWP_Uz_{gJ5ICMMOzieQ{toow^G*)X{9grBSg_IWOR0 zvYxS&8w)a-xF#p62^?iK;cz@|a$5iqp|cT50CMYVs{eM-k#e3T=PPuz7+qkf(!9k& zQHSH4VzG1fR1O$3Oxj<*9jEd0d|(j-`H&M!VBAF3Gy?Om1`;bP-=eDCqau2$ zAqbXAqQJ9#@>;)?G9VvUbA1XB75dwvmS+cLXLOJRsva6cE~Z+T^z{!5{yo&z zGip>+v>}B6_8W@`A1xFIQF9Y=P&GA8S(um&fxSB@bUwS>lV;5T1?c~MVY#0HHgVJr zkT&Woomg41j$pQ3sL$>$-UioHvI9>>z2{COjK*$yV!;RUGudJuiToAua>9N(+>lS2 zpw_l6jYJ;Iu9h@i=f+5CQPf%+Rz2Xc$jdo5GN&CW-^JZ^ITM{$N$5ogCwTIUV5i4w z>Kxm8zA$^YPU>`+wutQk>EYZ^>&no?;QD9%SE(u=Z*go|N;Df9G#hQp?)t_#c;5Ov zOfG$NY-9)IDpOjxG~S)m#hJFfdRWA$D%;I5QQ$kn!mNPK>;W^tDo;(qGTEh+<1-D+ zIX+DO1wt2$juqgBx^RjiHY4{e{!-bwH*sS41OEZ z;x$>LY}B~rR-p}1L*!j&M$#O+#3+8YXLtOIzqcr4lwcryIR^M8UY@BZcb>!Fbcsjs-qsB=5=%}OSev&Fek?*Q|c-&~S)NYeR|NLm( z9q;4g2;FI~Sfv3G6_X@>%+?5ZQr`bMy1_&8a0@ce+UPstNYgmmwSoPr>9c~Gt670K z+Bn)!AebAS7;LgGSKV?mG;C{diY+eE+BTyGNz%L(KqI{tUNAA2Y1Du@8cRV-&N%F`4RJM(n?GP12` zEX^>B&FOaY+&1lgtVH+)?vsNcxZMPm3r^o8nc|Y1Q zcW38*UpFUz6V!#Fj9-Aly2u8`SGP`XPXbLi95T0Gtmcwr z78gghf{Gd~v00IbXroZNWF{y{YCvQUXDy{QVu~8e&30WcjB0sX(S&RaXCi1KSREZ1 zamq`ZQt%=u0CGlKT^~ZAl&%isXJjXBp}X>g*Yb(wRTO)kwV*~w^M%^@8t<~-go0XX z8K5erl7C?UKM3H&rR-}~!Lbd=Z=tzzowZTSh$jH4xQ90X*7GMRow9Hm-9ht$-PeNz z=44r3yqQ7m{h!!q`?y+aF%9Q-j2F@2edCOrYAdS^-{avCYuCfg?CTK}{9ECLv;nC} zL1ZIP{O7McKT={>ksHN)HSFNfphGtVA(6<~Pkg??*qBvxTQlKm{(9$KdD|Ikw}_3V ziE`*vDOG>@n+v2on|yn zhUb|P;kfn2Oef9466;Id#7uY24E{w)g|BdP z4NA(96ncxvTqwV_(G|Kp#>ghS8-AVr!m*M9p;57&8|T(fTUX!P(1*yw^Y7s4_}Ol= zzkFwM2VXOxX`Qd+q~qopb3~*6U#7QM#h1Yf4+*8`NW1@5cZ0Sc80F;=do|AMe5K&! zVMseVi@4Id>Y-`b2|=aH6ERV=`ulPFT8qFrY67%#IX^S$0EaZA#A@&0NVmo!vN)SG zQ<3F71mtT^^EaO-7bgdg!pH(*8b_La5BOfZ;^^+hm|2N66O4p+CH1ar%l+)qH|_#+9RUohfauf!@#8spWoLKgCDsQ5r!{fW{WSY2*{&a7=81YelI4T4|@W7$} zUp)7&GAFq*P^O)HdU%|x!tD=8qJx`SK^SFVm^+l;n^Oc4?_!FA}@i{rMZGQh-T^?NP_J8o@{~z|G|4GXK z=l=f>m;PV+CV=@_r~lWi|0m7)?{eljDqO=4;muV1|jQDN?Sy~P7CU#ZP|lZ(jDFDNKdq{x_TuCHfgW*(px)C*H2 zC1S?`HwFE9uLq$Jsx!D~tkaHCiW~|Cv#CsqaFw-2ivk}d!uT8aPP;Dhq^o2k2vDP; zCx z>z=Qe`EQ!vq;E%(`);V!3tw+;GGI*8**qWsC8c@QD$TO(@W{vx``x}UM56AJN@S?u z-Q8V(e}9$^jd02Ql$<>hu!4QM~p672l zU)}dT?|2raMq|cwabjQq2nYyVXyEQxYJoCkvLp$h^Jf5VcKZnk2ySn0JKV0TU6&;_ zHL(EdnwqMfj}H&du+Zx2>dKz`$J04{zF%H4!GA1QS65L)USD2}jg1S7iw(nnXo}O} zVOaGeAIAWAbS4mj2h72N0cP_VetZOTby0GZtsdX)?d_gDk%42e_otOf+&@QCz&(nn z_=I*7!D4lo7qgS)+SKQtrJhuTM5x%<*homg z01yH6EG&ceheW)k239AIWfOz`!~5ZbhJdiBD3r)S2?+_P;J&f3GvvVNGceS%CdvqP zUJ}(;X;5H)apH)mDC4@?>vmQA#>Vgw5}~6PJE)ZPz%2yfaSw#lhUb!v+tDCFPmI^}+|6rFCDOs=(Pf7@t;7+y#)>E>oEFf|wl7*JZ7!Nqpb zWL0ZE;FjeLbF5I37y^{25iLgSLmYX~5}Pww54YKDpqSXA;JcsTt&rm}*QQ;qO*=g~ zNfWLG0|T>S-TYQShg+Rg^_=QcQa*X}w;)W;V&OaKrI0yTu+H<@)}=BZ4ITX#K$^5c zBoZwLsiIVG!FoUu+6WaeGdue;;{T}JF9rTSW%H4{9&IQn1y)yoNXV1w=Q+_ypFcf! zpD4#?njCCYE*FX@1>&*;64t52CU{=RS$CVnCGV-vL!+>f({P*rd=G|(zfwbQ$Bj~< zf1N!&{Uw&;eZPJP?w}PlOEl%h6{S&Uugn7i!tY|Lv6;46+mFq9DUh9H%LprYo_O)x z%|54d(m?C3$`p~DQNW8onb^ktT6tvFAUDG9Xg=X`dd);2dtB6fJQ_N)U|PcMz|N6! 
z08^dX0_y+THiE&oV5g-)0G>4{#MC(?v@KzS-6$35ET{8%gZ}D zI$DfTkt9TW?5#Z$p+-YP%VAM{9oP_R!dUlyX$Px<{>Emy6*gVH(hD%t->!Xc{X?Zn z^Lsw3was!QmxX~6p!s(9cB)0#;xmu_`98g%yHw*Eb4ekhmCxvdgpAY;ubV3wIM<|%4q1f5!ND$x#YbNG>}p2!Q&kMp zCB4=7^kI90F^8;l>vw!}?`+NU0LeL9^#@m2Yx6@3?|6X&=a^*Vj$fP^I|!OV-B5?~-kx|3e?zBmxBDer zjh=T0#+rbs=Buf(=hnuD7FPPIPHUKAJL7=;F3#B1K*oU#f(KppQ^y0NDt~1qb#Bi0 z0fZvA;SQ6xVh*Otzi@#&aj2tUVP!78*I+cgXeCtV>Y18cbi|;E{NvjqePc0)_ZIQQ ziCSVWXUn@Ii|Tqu`bfns_cF;Grg%;I?H>qXpo?nOaGAfo#$VM8bf@A%C z;JRyu7IN6PgBObJv7-d=J9?U!hnn(Esgi~O8%ebkcO7R-RO7Ox%>7_8ncB9Eh^sYn zsB^oqRh>z!tGl{SrsE474t{@=X+lyatz?;n3O zI*K=ay+sOt6p=Ejmf+yvGxgx%{W81MT=MeTT6vvK8c3t^!Z*IoBC47kUZ$1f3MgNX z>#<*T5!KaKxdMUY5qtLR9v&Vlo0=kQnG4Ez<7Q`SW$~liclY;3Mn+O!)ag4I-ZC)f zdZ~20+g>X{6u6(PVCTLFlgt>Mwf=7=biR8c4J7E*rco{pKlpm35r^G57ZuVOJ(0L( z7Lw#&mowP?}$!(}%^f=xv6Mo!%Kcy|5 zv19OR_<7J&*W9Dm-pMVG4oHG$a158-Ek6lvZ;MV>_sep3xNPP=(G{k;J>S}_rM2wO zewvGFUWq!K0$XOI$=T2;baJ}3Q$=NEoi}kSC-5w!tI6q3Mt*rORA9B% z7`pt#{)WBmZ}Tz{%&OUa>B#I zD^tcoM>jJx4Cv`@0<}P_H`i5fKuk3InsW}8eMP5^Fywr5H=maiCWP9eGYsm^2nE}a zmQ)eYAq80MUJaGd74_uxrjJ1MeBo>ixj7d{MnnL53*&oux>)bZU^4#%#Z<*Uo3Z0i zejY`$_nFCty?0>4ggeHLv}LEleX5h^n;#`d#wQ&g0LeIyYZK~Zurg@)m<#&Cr!f>} zl>jv=!tBPWOzJiJl!$RYCTI18Pgjabl$z0(Lx|@lQwHkFKKlk>nrz? z-;OLNGeuGkObL{x7GPu^67%(#ch(LfZDTL)fmudF`=wr+s=gDLvoRG6YU5OtGGnAi zL=@ld5`O4frgl>HSWZ_LmEaq0B;Wnil<)I)CyT7DQVxtKnUtygq8z&s!NDRvK0X2h zK&j(2f7FBX@(e^pLzF2OXJ=pandMO_DYs3$?gXagEc6&u6^4;uS>BTL`{(BFm>w&V z4zx(_Yx8d{h$Vpe4Gr`8@oSr#hjO+xQEHv_xI3aBdJkTg?q7`Asgfa&j7Tc_{^8O9l1W7QC~{~lOiT()R&~Uhc@vXVA5ltc@`MJo zB3M}1DQ+%G#o1Zw#qV3iG7<`xNIB1+Qz5?ZKc~h;epP<@!?R{;`koAd;eHx2+~ivCgU z1*zZw#clpr<3F)$PPd8_X5Xj!H<%>^M6l6d#E+bpE~}J)hHUMP8^!iAl@h( z|CKiIF9BNNvsE!gwfBen$J=+jx)l9M4< z0{NcL_m(JMUaHJ=h&3mDXnfq+6O3i^RUrsjXuwk_fX)`5|5(!iEbL3y)GhXuv@*D-K~tgp7;N7hLU?#oybFm?u14>yy5N zQqYjdR1?@1i1;HK&1C}9%Pr^6SX2Rz>p_vRV|c>I%J->EwpNOtF2G!I2%!Pu=R?tz zr7mKz-56Z|x;`kO`g3JRE_f2ejsk7BW#ya7`g@TAXV8Trr|{bG^9PQHmRvBj-qwBY zW};sO_*7m_m=yKR`4u(n>M7`zea$Q+Oih6Z=#k=jIf~(x|Ic242lMSE+rxj6ZV61Z zB3vz+m%mHb`8!|wHr1L_jUF{+?%!8zTpHQAv?Na;Vel=^Z@MG(n6Y@Aq^Qt^O#dZ| ze5R)OmK9QV89Tg1YI(U*zYjiaHbpHoR47Km|J$vFY_H{)26;9*M{IHy^^8$@Pxo!! 
zNNF?!{vLX$`Upy;$A>r84_y%-~MX7ix`zGP*1J6JnaN{K!xrtj+E4pU3d7gVhZ zQ>C0NoZycwbgXxjY|j0<2&67Qy0%b}#Fv@3+v=nU7v0<6*X!|t1t5hNJ^-7Vnl83_ zAA0@{`#cr+ftYWXE!%y4eE|UhwR%NZycn6J2oSuSU|eq=X}0}nFx&6<5iR?f>D4ah zwa>rP82%A5_lky6_+scOFG}Ifd!8K7zJtr0&%8?HJh{-gEBk2TR2*K!18H-zSvxlX z8D!27DNt!GU2HzV#3Dk2Uqk6`Hd(CA%&5w?kFlj@Q~JXLq}}xFrlvNLziV+eHe^#N zI?&a;gsFQJe493SJxqtIS9{zZcM3N*x(>m{Ubu-d4#NNBD_ySL+|l!|5i4_exzo%htNacWK+E z+6@uqId(Mx<&8$6ga<=98Lj*mA(G7($}8Z?3U4fpFEd4Iv)YHuO)dKrHKH3-Fg7Lc zuvso`+JmO4I4sE-Sp~%IYurEd_2N8H+45-(MJ*W+|9v=`S`ls)6507BU39nDO;XjI z__4zC6=`_@H%2l!rF5KETH08Vw$k6p&kXh%s@RKI9EQ!7!p9nEG!>HWDs4z=tr_vT zzP|hi;UN-#D2ar#@F*H;>V;=8{Z_?(b!ye;t4#(xd$fH#;Ffj947UGAOzM`)TTIOa=ILjE0I?nL#pGO?ABSgz=mgbmP z-5#t?%f*1>V)~zRipZojTI`litW#wA-%`M%gjpj2nu=*&X5&v)7JnpTdZ$|U_xh#U z&-Rg75Z@9Wmoo3gs>P!?;eS6V$(N1tlfUb++2lB0jV$)L;yOQ6RywPQxl*s}@fgL) zl`TjAnmp+!dA`fmX$Wt=6$sL~%=bz!85-^y3S_LXo^~-z3r&= z&+gDvLbbY-KiwV-!!}v6Ergk=D?}D{l}mq>&IbA2?Y?1Ga(@5M<AG&QtJcy%(*E3nMK$ z7|b2DHWod( zPOs(Afr(hE+$S_&Fa}Qs{|RXEmuw^kT_@L?`AT0`y9_*4fCsC~`P)0c1=rEibEuQoX&sVPazbFoY!ID{KE1)hbNIE}Ebj_~7YdU*~kYy*&g} zR8$0no6!t~t~EZ-ekuRJqIlZ$9E5JO$tN=x)eSzjImBV_ylWMA_tut{XY}=~A5t+q zTw%MrG|%^E(>@uJjtq%4h1mxi%H!FGFG~i46I=T#JO`fg>CpD*6__iz_ zo5KH72g}1U(%0X)zK(spIkYg(eB0my&G~tPVSNuT<>OL(27}zQjpvZ%%h$YQ4*?F` z@We#r!E^#b5mwy~V-7(83b+8@kJ-Wvef?1cw!8|G-|^aIWbW+OgJumR$3+8|E6?oS zH<>e4=;RgNU-WB(fp5eCgUS7CwRS%ZT6TAD7-T%YbTZYB8H0{De5&|R;9D)aT z*Wm8%PH=a3cXxMpcMI~=dKIdH5d4GKMXV#jvx~H}7>Z-eYq`oG6`Zo+= zl3zLBYC}z_RgvT2K!-G<08Xc%>TL#`8QoWj`GzvV#x&X|oZ9ybDyD1eN&@S+@s!K- zTo?W}dTr8~y2AhT0k~xj?`$`eTC(66>95i9PA8X>Qw{l_n9b%eL`d9?Z&-IMwT)X{|Xvo(TSrlZwelGS-T?d?ZqTC5R(=ZmK%}`$6r*~T2g`Q7g;{S$X zo@Bb?+uGQxm0!A2b*fpYsim$%D*dKQgPU!T=jNxf{L?iZrD9_umE+)SesFh%hs^8G zrCXgH)(!^A9S6?s$J)u9>=hqp6kjml{afixQR70%^yO+@p~E1MU-EphdV9854t>5Q zC5i1)=jEu?I2FeL(<#AzLuJ0pFqtsZMt&%Y9c9afcYe=CiMUQ8eU+o_bs1Le*0Kos zvr*r*D|3V0X6%I3TALK8H7XW9Oih9UPGi{0^yIMF?fSz zj)2$I;ch7!5@7ILt6f3(^GvC`np$|EkXX$$UEeon9+#bJe~y;Xc4`Q~ll|i4-^}-Q zms5cRej?<4;5u8W*vvW`f&u_ga&y5aRLSm^OcgVzAgW?9-Tt|@#X;pl*&7_{-@x25 zQiYMJX<$G=34X%psJML@0y=t0d3m{Zd7YcekSTVk9rK=Zic#tNZDmhgbUF1~8A-(! 
z2_NB@hqd;jPkS&KAfP@l^>rfsh2;xnr|q8X518S?#L5muu$g)dw!|2b&c(3=cEk{V zyv@?N@+C{49?Q-9x%O!6HT~zuFO;qghUf0!gZK$=?bq>`cIYqF=XbHLLd1-WzvR&d z3aFZtO;#~x^n0TXsWZ~j=j9W#Lh&3=J-jae!&qfJk0qj06rx~rI8ko2NUyHG6aobZ zeg$u#+KW5Qq`m2)2%x-6i0~l5+^qZwAmNIL34A{se7Rnm|3WzcW8Pa`b2eqyJr^2- zK}@}SSL}H>Ek;Pg;nV6u3$`9{p=0yt^7Pb)LHUCUQiF|}dVw`;vy(~F?$w9lRiRCO zh|QHs20TxsQRTZU(;nDphNr&Ph(d5BwN{XPo%xkRr@EXF>OSjPu7_#c&YpnE(TI}l zg|4>tztPHr&mhf=&RJhy|0?`&5!|XIM1hTs9UrU!_^aO7WQ#w0hDL9Sg|tfcM(lAz zr3eO~L>*hfRsM9JIbhN5X5~l!S$Kr1UCjv5?k&ISns$GJQ}NSG{xS7>JaiwVyEir% zpiZ1fN#Af!8jzPn_h%oa$Z9V4xKz!Pp0NpNALf!jrwznE<8VQFC!!{Q8oiRn;rImbqIuQ-L(CU5 z&QWVJ8UcwmRx(pt7|E)tO@{^m2nf`KY$I=(C&EggC!XY}RB;e%%eBeC+x+w_@3xl<5wc;R z&`RdX9#`-PCxhwq1W`{K-CJq{xd3>cK&aw+S&WR{zY02=dXt=;Xu2g z#V76cSFq2^$X_~$9&F@3GIZ@O|zyP zd(w3I+Vp1coo|Px6Q3@7=GtG|&>b`|GsNh{*+$c^Vt=5Z{HxD8o~`ne61H7Ezqxo@ z{IWVJZRZFuz0bH+0p!W-YjaSDMTD+dJ!PMEHaakF(V>}j#23apME6Il`Kc^~FGx`y ztjrEYjm%FsnlHL9cVRq3u+mhO{j0)n{NW!TA5#*Ag@rHf?%pWaCf32aeWj5gxV;h( zf8KrS;Zy*AOGkZY@XaP~uit0{OU*1@C0cc>E24#vqI{wS1cr?1u6FW*S+3sKdKqEKr0JD|n8*(ovCK%>CzR)m~S9s>grqq|JjX_8FXLF_)0^ zenQIy&!v&P4|SF8AAIe#^50~?5l*WTCY=y#skb-MXv4?OMpue+((XI&l2|R5fQA{v zbn|g}tADlDW<0(hE<)z(PuIIucDOistdf}VXwW`P`rXQ+hEdhY&F4#Ja_YrNFIiB$ zuJNgJW1XDP(ozaBS2A^)l)~X-B>Yvvh7N!$dF_RLKb^N<$A3-8#z&R1n=KRwj^2d5 zMw%-F%2`?f-HNZTPdYuI^g7)w|IS~3?wFsVRqxnahX)l}9)DBR&W>RZiI5J}+) z_|iqeac}nm>g4J50B}H^Jf4F7`AeS>F(AsoH-b~cq`e{hT{ZKwR3bz*m>0?#kj{1d z3DFV3hLrO2cZkYyx%mlWLs1e7sk>9V*9Gzgq6KYG@T!-))%*8#W&jKW;XxXC=l}(a z-MzC-LmWfd`$G1Nr*r-}+Oq9gvyvr&Ux&taRIu3Ijn!@mV!2yQRq!8NKo!PVr=v%n ziNayK(socsQpj`x?hH42yWx=^q0ku-Djj$OjF1H2zb2Gi-R@3IX?~ewiyMJn8FgDy;t+Ug~ ztb0}LP>@o9P{B`c=g!*2%kRidNh5ruBR<U2j5kyY48oc~c^g#|I<+HS;F# zeTQA(Q~6pR=I1EcZb~m+O6H=O@Ttt+W)eRG#xO3X(ZkdmUY#!pqyPY)yY697kN|<{Xs;uuXNU)&`$lAxgm%iN(nq3T~%e^)OekrAIP`b(&$04xq z7o7Tj68)NV$*$^n3jlb1#qRzd_||l-{7n*cw3c5&v~O!5L~UBT(t8jm8;o(RVv@1^ z-7$seM5Brr7Y-*FCrlMLh{E#pH983#GW**U0}MAD|B`6))voU#Zx7MfF_ILf#5d(cxza^YnYUV;)&N;yOG!lO7sU?U04Wma-ny$O`8&7 zvggHBK<0LkNk~AQ@gRd@kN373xx=l=s>6W~2fVR>at3HdXgA507Vp@knL%WeY5WSq zI(d{=-=Vi|u7(D{?Y1d>jU57r3b?yb(Z=P3#uq?jPspD5 z2~m-rOZmB){g7s<`0EU^1TG~_4~^oZ(39n`J&#uhBJDOGX$2#2JJ^Onu6ms@-Jw8a zhk=&$<=^lNxx0OghMVQbG8@W`5e~QapyM2D4Gh-8or)>;>2>AsSt;-I^@*etgt%Nv zu1?u->lECj94o(K8(0dpBdB91j*pB{bA5S)7Lu1^$diyO00VT@!a%3jKR?B*tXDdi zv!sWAm@(Owqs$KH!`JMhJX0Qm(6_#b1fow{rpr8mbu*8H02|d9tzQE`W4{v7*1jFj z=QT);*<$;XDjqD1MYO1`1?n?kLcRLK{)DceOT7k@QPNx3TSr@5?TBIe{83eQC-LIvUw(_zJ3air3Bg%YVu>H0HMlZUU4k(EstJ;rckZYOgR z;TI|(ML+$(018GR7fA3G6vvF4x0cwI>gJ2-uLY0hIp2b-KK#@G!3v0TC2129MCtX= zea|XlgM1?-==964B$VdSdCA{4T#*9k;M8@1+>IwYFuTr{r6!Hrclr3dp?zvx%k;9& z!8h(`vaH*4dALTwSv-)i(Wq`^eHEbhMJk66= zG*QJEK039WEG9Fv(m5EkoFuwF7Gg9;c#?ppEaWcB1M~WtqxVx;YsIIZ)iFM=+E1o4 zXD)O+%$$x?S8s8>>yD)r@RUIsFY4<0e7^JN3ijR>hg1!{wz3h7N%&8}hhh~ZBqXd> z-sLJ4&z1%)?M*aaR}M6sN%DRWDzTvRr3y30Qh)LizdKM@Y;3d?(@K8=PD>P?5hK$` ztiGCE#sB>b0Ie=gQY3x?R3IXTTzAbFmep89v zk}@06pyL9SCYD=YH~x@wzOw278_W@jFR<>Q9S9D6*u=g@ET*5%W-lal5QV)Q%&bIe zdPH%SrrNGAHR~#IS{*T(NV@Bi5p(kW`a3Z9>*n|5*6j1Z!bzm$_$Vmy@O$2p>wPjT z!(b6VBst(8{jf|~da$N(r3y{6-QoJp50z^oz4RHX!3 zf5janeM&F*+<$ML?N9hMdm?B9VvOD1kjUW6N6X;eNcfZQtQ_?6K)f}q=SU17b;8m` z93k_xr>|U}!raU6^*{{y9iN+Hs-QQB$|3d4ulWmcO^g&8_ooLb4;u?Fxabp6>MuUe z)2}*P9Lfv*oGMR_#r_-$^IFy}bfL)1I?7Vbmd&Cl_&Aiz)yGF3W}1-t#_;1yT|>2qPYvWr-V0K`Y>A+sk= zLBrbEj6cN|SM4jSIN&xn6^br%N%6HftV+h)+lL{R?g#}!yC3E?J(!UgcI}+LIQq<0 zgQDcYu$u0A9RK}bZ{!QWe=$ZP2;jwVeMgsnuho*YXc@)#*vxyCz7nR1=f3Xdq6Z59 z_M%zp3;eqh=mBjO!Z4o}#8?!~VNuT}?br$nv->lNN89QFR(ty#?XO!|y(Ss9ek}xK zUh{o5t5f*hxE%-Tb!Z4!)Vs^rcPg60`M# 
zZcbNLIp5epdlOh2^psk{6U)5_#X4)>qh*=@PC*kx{ZBFC?*~PWr#<)RS8R)v!HpFR zczCJ*3^#-`y{oxyET)t;fBQP|cN9M>_c&j_@Xa;9=O-j4Zi;M{;?mc6pzc%_HCyNi${kY~lg<;~ZlzD7Ry>F*io4{=Dt# z_ugEhom<#HjCrHr6O)Mj; z|Fe+Sg5u0*_aoyv-d)p`38WK1NHNt@x-T?N6?ClaHv-4KImBjn@=68~hz|y2Iz8N> zl}s>4YJnxMuQWK5u-XQ4A~2dlla7K%JANN86)or7pI3I3Ywv^hu6_)rqJ$Fp^N_$m z2#{3{^aI5r|Mk|rDu3{gvvE+EseaIdftbTW?giDoEI*LhD z(xQS!Cg|ytcQXE4edp2?Nb`r?5+t1~ZLV$O})k2Vdzx5O! zRHR(eb@BG*@&$z!#A^PupAK5Ee*u8#%s}y`nAB9)r<=o#jX&n*rzVyJ6cl9yiieBU zxla-x#~bO&OUgg4xrSXDU&VW*4lcI4G7HF5S68PpnUj%_kfaOq zB?^Lg7$6Bx1OEbmV>dnT|M#}N%n1O<5^k?zsY{NUky>hacsnzx zpD2rrOvr9xUP**KAFynOqBbx~X*5})_S^0d{Qvg%EyVExj8D9w(vMUQO|9#|6R2Q( z6W8OlyZDRxC+Uo%ztwIYlZ7m^d7jNf!iO8)VN5uS#ypbIs$DW7sL;30S5fDS2kb&+ zpHr(p-URs<9LDfy-Jb)2RoXou;yBVb|L|~$c&6*U#i~6lqVu*#QIdEuTg|m)`f}Uj z%fu@q>HIZzr-Sab{bJr~^?VX>PdlfxthX75^&mtP(h&YjW~NBUw`+AyfEO9Z%VhKj ztY#impN)Ww$J5OB)HjobJYa`>NgD@(-gxNqgq%Nuzbx)}SbK}TdLbCwbdgD3)xlm} zgnL4UTUr8ftQh|xH4ZU~%}-^TDdup|Mf9zcMd#Ph%C{A%eQfMMmWVYRzTLG~j3O0f z=v=Oj6nusK{R7iEukB&rr%VOzwuT8V_1W#s#>Wb@C8|-^I1v0xK<9_{Alvs&m#a0F zm6gpDN+}r&fLOpFem5xY9OI~?ql5GfJa))zrNJDTiJ-Op=3L`av|we!T}pF5Z(U_- zDlV#X_3nIgv@wyfU?eDaYNAKUfr$_dG6ER)U1p+!$^`!xPrSp4mEX%+LlPB!gkYzW zi@m#+(hm~ZACf9AwQU_g0>lmyOl7P|Eo(G8+h{nr6SMeHe`u1-srOhk>?BJQAV6uA zTa3*|@#=49I8XX2+Cf^?$@Myp_V{ejlwwVvxhQ5QRDmI5x)QTooy&%G zpu%>+#Y#6PkvmibfnaEQE0vYejLYuw5_Jw;ybNINxEP80k8_YC*>sw_}ig@;Kby3u^9cf${rBn>LyC>ns zu`nwID1GxYDMl~)8QQnS7x)7W10!+OP4-clkceo{_g+F=+~w}X*w|R*QGzrxoi#S= z%f?Wyqa=FWckXD>&yxaDI2=lMbF$PujP_Oz*9|b{zh8UT-BNV-jt;fWXDScNLI*;x zWaX}nXgpdOD~f$3-<2d$d32MVO&qqDbQv3NGmU4a#D&T)>&*=R~j4_o+yso3|u-C|1QR7 zYeK7>9V^{SzA(2RXUl!3&diinDpkb!9IquuYSA3wGIH~%oGt&oOl?3TQ_HooJ5!##qDvXH-J%y1u>%F` z08w89J|TE996C~DdI;Sb$f?~ygd!YO`F_m;^KyDNbao#dE@NIo0t_uI;EO4Ki5CS2 zI2SIMc~F5@0PBv8b((j&>@sB+IEiAx0MhaVrRv|39o80rk&&z&I(Fo05qPi5T%QXv zV>j+0-5KkexWy68s2Y7-i{vXSD>2s_Ku*c(;&Q`U?-d<|>A_Q7*SeB(9b*cb8Y?JO zE1`^9^4`UwwI}&+00KwF+jF>$-9Or9Mn)#*GX|4ewj%k0$|(M~%)20-b%*whz{ENR zbA`45>wWj^yge3W*cZV)sy7Y;%+N&R#Vk4lx$^+Yz5f0L3klZ=j@UAb%&7HbRHOYx zi#a*Bt7Ft6dNuo_9AUw=?{H^^OZxTTqyo1U&MSYsrU+S6#nex1OJ*^wtC%^^h3L6w@XzX8YM))V~snpdqU`KYQM z8Z!k32v1plP_>?~uCSf}#+~ecHx1>PjhjtuWMgs&>^I zTkFD5H#M_`&hq6(%f5BC9c>rKekwbI-RBpikA~$qB67U*X{Hv&%8_Yx^VfO%WmpKM=w;Jy?C&wB%Pb4NG?m-68g#M?CkLkve{`lu{MX%7$; z*=;1z7+a}QacY zPAF22zB^~c)LWWyG0*u)TCm0}TuN=%slu9=%I$_vbsq$|Rxh_ey^nE4@qii64o<{F zWzBAxzynHpN3@v(Sz1H-MA5ynbmt+_d1 zNafkt>}keoRND7?P+V=dIRY|KzHe{)BVAN^g1hI3`iRvU?2R;(VR8<+?8aHFe?1qV zv^$Fz!E!YlzdT?vdJ>jnCDzn77Z;XHUv$T9AG(0*(RhrLJ~Qj_LOq|4>S%H}N+9{u z>nG#DuJUxsw}N^b5wzx{ZOeQcB;}Ahbu7fJ^C`;Zj45BNJpftcKYKm2%x@28$<-9g z(dmSNw_8q)S4(`_)@l`5*=t#`)NH1H z&j-f%usq#n{4}xTE>u@8DYT{i~cZB$F+JZ|C>&U zP1faZ_!1+B32|StN_SV_RI9^+|Kdj~iaQ_iElmRZi{)cMcKcWx&S=TNY?2Ma0iBw` zWOuD+9rU{Vq}AUCf%OwVISj?OWjsNfXJOCQ`=_JBGDLi}fD>8pS5C`x)2?{N&;duJ zBhGIaeiq`mO0eVbU<2o$KU2Lo`?tJyheuNOkW!z`?LF?WRD?k>cMU( zKVr|l92~YI$g}6BE(f>f#(7oEz*)o%VWgfIIa*+1JJxz;hzXDKn+V=4UhA>?`0VO3 z%m^EOtpp6OUIr%6@>YFA6XnIN`j%aF%OWcB_%JY+UyMr`CTU$%ZRP!Cd^`4@(T$?pyf@UJyp|>KdEMxzkf!oOFHF}hQ#HcJJ6>J?;IzM zRH0uKA~Zh+(0d;&Uah>b;)VKusY;6uTw!eIj+$$%d=uxd4F2N!@La}%lEz_u>yf!Wu&@|$lfKPm?4`PfuW+(+p%5s? 
zxWC#~J)PF6y(8@fmt$v}_S>`s;cYv%pQOasaP&^08~*CjdchtWuEk_v2lto~t|SNIb+yzOw@|9@l=|T_7zgP!7;pH8 zih!+!_pUl}O{WP>F>O7kG8g#y^)^{=|G;KfR(syQG-FNY1T8})!xR?h;wf4HzBAUP ztasgWmftEYP{INtgND0TD}OuLThBi)UZ*Oi4JTsKX$N5vtqv43zjHNKbn4um3J9Fk z5lhwlOmL@ofO~ddoPN!$LuGfV8qe9B;-tgD<)6atpfGCq7BR`dd?=3fUb_$cf}Si5 zzozq)@I-&2tag%ad~v&?^8Gd*o;N7xY>UfM(EX2kV$hM7vB!+O6~D`HL9x{1PP_F4 z89f=VAL2V(``_b6PRjFazD<>@r;cS7eFxdWnhSKq)$E-dtClcfmJ zJl6{oXx$U7yUukYu^bfw&l|G+lGquynQ-A1oi#vz>*8&P)DD9vGKw9Y=%tc<=0v+f ze7{KsU!9@q+v=bD4YJ}t)Y|rakIL8_8}CpAA*(e#*qjMKj0@#edT@YU2#|vUo92&K zM1-W_#|ZZ;b;=BC@6fj4`V0>LHNKy^!t4**pmNNTcca^ejw;8^j^9r=owc%lvnB`Q zYLPV=`9b5JrEuNyIl>2I`+7qA{WR7_9Y6n;FE}a+isLzA_Vh=mbc@QEe2`PFR9v2V zik?`c9%TMhIeJ+H$`UrzxFRxi^^DR;`qEZpop~+(16y*aN;f{v^&3FH;G{A3TlUMh z&E1R=W6=6eRaGiPxL5!Zz8Uts6BmZP&Bg8|Z`6{-+6RX5c+(fQOj0?T0J0>Yy5pCM zto&?%khS%1dV2l1=!79bNNwJdenC;s_X}Pgp01UZRNi778#xjrOG`^|aQ|_U^cM$P z?|7fMyriO%+obq`^G*?G=RbCl-fFQ&M0W}UFSfsx#{1SvWa_SN53G=#!3i1NzZVv> z#l{VsbvP`l!3!nD%>=#q6f0488sPFL^bWN3yZoq2{2NqxZ$TU-sHr>V+!pXf>0n6= z*F!Hdi`=U#=*SA;JG4+dcmL)T~kUq z8;?!D+e9D=(^4)=og2im9|X2N!Ok-*11}V;PN+z)%k!^-ZjT+BOTWPCd`McYHBa@D zSgpK7qD{=hR!RG=2hT zXhfbpxtCKqy&U81zN|4*7&9z`7*SH0oE%1zSwDKoKH*O&h3`DTXf(gS_`dEpKCIgC z(V6n~Xx+8aFSTM+;Qhv-`t^fZy}X#~u-FC1xud&Czx9u71K5Dhr;hjHt-7tSnQv4* zWbIQcxk8qfYEa>B5$o69SYPda+bvCgKMfW*=%_`Uz@pVB@rk^X>o5{)l75Q zrhi6^Pel9$+(SkX?#Lytsj;xsY@pieS_$((Pw``^$1JWp-TFfGi^PkuW9kB!m7NXR zhi-kg`r9umFc8E=5;V*o^{BNxRH@g<#DSq|ZZD+R>?72e$G$x78i0$7T9}-QL3~+Hr4b>n^ybwa&wK@jU_V9X>Lv+ZKap<*4ZbHx zZG_m+vAbdskq`vU&CY_@W#rfWEsv!U>+b0pS`x8wS@F1YO!lzi(!!9siVC#wIt(vE zFP9B^6@-?*GpGj1jZ_W9r4<#E!bZq{MIA-zcy*y@G6 zJbs(1j_--L1P(==FgKm4I7+VNN~qXcGK)8WU=|4(1+gioj-YUFcUAGEZIzu#fJel8 z`mP+ny+A}o1UfzthuwMjnPXEY2ao8h%X_(1K9vp{n*+rS0^scnG+`m~_1E+?C$>F> zQvzaIYne2XK~NR+0zF6L$_6JYyZH%wt^KxHc)u*IBg;JD0;`W>T_4LK&kHF|_vlRRMUWk8PA zIriYTD~0~0v6dk}wg8(6Z(q!VwEp$+SPuPwv`{C61DkD&WbepO6mVQ#ab9TRtIl0S z;gIP7O0m2%3@*1@*AcI%SYNyRqWnfB=khUAUH&i-U7wuZ+G*G6Y zvQ9NCQW>8Ex(q1a5zVKoS!H1YSxYU$n&49t+^@sOvwXka;h`X#TJ*Y~I2@-hW-&6k zt+ooO)$FZXKxQ{E2y1M18n~M;6*tu*Q&HfBA@=zTq&?y8mjm`;C;N&?$@BH~8lF-G zO~^#wU1T=Vpa9&pRap%ULw@mFFP~68hCjag`mk!c8=^B&cFAk;{Ci@${pCxns^q703WAw%(~DU5aaqaxctAbK zyINg+c%ygt;s%r=ke6YUBfox9&bMGm!9U%xSlL5!^!Xt-U}BKATzj+_&(6Wj%mL~m zrMmhV0>tq&r=!{N!MS#(r*#J=FP(X>5|G`#gSU4`Bhp1QQA zCV7X@g!4M4E-TEQ!R2Munt0Y}mnPWbyVL3QUBk>0R@TGGyl1wQU5%yMqt)*+LiZg$ zSO?hQwV>8EH;ZcN?&Ngd(1u#-fAb|*yOG7y)M$zty@lXR)&ItwNJL5%p{?wp4S(K8 z(A&rF@Be-~-MvEIk;|CQ^!2Z&g2X7@iZS(V`iaDB-a|4t&RJ$~5N`cz8^s|cE?;Io zb|I04HBKB;*Nm)xC)snY*EU(B;*4IuCFXJLv3B-f$##2&mmk__?77Tb-9{)0%R2$- zwV#7=Rh1<}{!VU?j(M1+Y=_L6#h%^B8Tz7-g*D2+=gfD!sB19q{LM=tZVE768r&k` zY7LeNyNnKA!Fu>p`yP^kgzEfTt%PIh8!tnxF}<0?_>zZ@2{xU2CDloG%0u}b>lyOV zBAb0lq6D+|@F-FjZgx*|#@vl~_|E}Js+4k;d?{vald;;kBjCI4E|NQNLqtep0TwHc z*|rIn<94~Dip@Di?{o`9~9qM+?ORnG#um43y=3G8|ufm za^>dsLyW2JdkAN&%8E5v#|+*xt<#JYb@~SihmJiuNjbhZ@fRqRKx1>CVA9f{stIg$ z_<7y(O2&IuJbrqN-|D6#%W};v-pAoZkV@EFCJi(j;w>+6$Uk_o4nd|F99#b$~y=Q$6yS&7NBzz6ZJb>Xl*hA$<*YCbX0R zuACp-zW632pryfLIK&uDrOmSZ&(DH&aEo&8rcVGP+6bi+W*6?K-IYK~chaJ4$RU%I zaP?N(O9oRgKe%rFz|#(EQQO^s*uVwBcw8Ce*lNZ2a#%MjaIC(xP_=}7r3=zj7_Cmv z`ba7mTzIa8dtvY)xab(U^CclM zBq7NCs9%k_G<=#G0`k1v66g^=og}Kkk!@*|hX?CwSsWmDOFwv+^8H2F`h(0euyA$5 z2741Wr!Mge&JvDY1rp-10qanM&v8QET!DY)D3r20yVH}mO^lShs%FnLkI)TsBsQ(c z0G1;aDHSwplXaJNgP#R3nV6~U<|qK4A1}u2VToGZ;~^X36RQ|s#5oMB;wVDY9376X z1`i%sm~A=f=Y8zew3F7i3t7*CAd|}C_U}lYy&y+roItehO1V+B26 zV?jh*TwYGj)V>8{YN?Tv zoB`V)udr~&C=KX3qDQBopx~VNrGgyvr-uhOxLY%WBgtF7$2f9Ee0HD%GIVB%nKC@| zZLQKSkCL)Iwaa+**jZhs7VJjOxndvA8((myI9m8MPoxYLPoGJ$D@X{#5)1nI 
zPdIx{+Q-CDb&xL%pBA+4=BVH*K&KTSoZxQN4vL}aoc_eC;Q}h3cOHSu4NsC@WTw~J zz4zP7_0_)C7N+Tvs*OPc&vOGDqZ8ioTZ9q^Lp9=FpWdb(e3u?~{j$;`;`b|3OAAc? zmTLh#J|{tc{i~HfBJuUJXVy6A1_0PNzj-G1?Xm|0{U%7bQNcGq{^=1US)o3LAjSir zPC;B8n&IQm9w9$&?S5Zi;)6JAKv*K*|0MdQ{-29~-vGS?_@@pZ7XUxS@G=OhX-lIE*bOspb+FWEv`YyU+^pZ2`f;{`P}(Yuc8!g* z;&>#2g299e%E4>$b@Ac3`jRVIm{~s`qzyI7O{^Tj0e^$D);DN%pJ&&aA(QGHlg73v zS1iOicgUb6Ky?Sa)D!gf(%IUmP$({tBCKkFxZwfI<&Eh?`FMncq2X{UnC*3)!2v#T zesOILb3s|5nWms9w*n#NCC=1@xSEVAuPDz7CjsaX{-NQf)9rNga0X<30>Zp_7zogA z8ti86l-T5ew7j~S&0p_9_X=EB6|XkBYY&`HA=7lj0iWG^GLWOWI!Yr_ev#vSh%v@f z!kF%6febWhDV6X>OSzt(mSm46njvS9_~5%b8P;kHim9zjqeZm9dll?vs{k z6a=PY(#$#dWol#^|o?nA_rH5E;Ep8c2^)4r`{_!p}x zqD<$@j1*e7zf~Lz%uZ6q`y1rc9lDFM}YnOuN z+(Rxyho`KgR{p7@{4^ci@mXeELVV|A)M{=kCzlp#x&{EGIlT}AVv6}XIVq`X;0^=b z1}db^b{p~9G8+i$Pc#qwBpxvljjm&U2EgGB5qaVv;QECIhi) z`pL@@fzST}>FGo@iYtff8+}aWL?zl%WB3?DIm3}$Gts#G{jb4Cm*L3VKa(No=vZnD`zos`?`CHgd23!dV>Kn zS!H=7;tm5(!YK6hqwZCkte`_!HJc(b9@D+rLSr-6_7dvgVkC1ufh5=upx}b9*3&b<6 zNuqC`_>C36jcLz8V|e(9s3@DnVmOTdAP6wh{!RLc&zIO*qY3=3Fi79EqbS)_bf$zJ zJ79oT;OUS|r%%Si0WtUIHa=n_+uxZxknVf?ylCd2;`|$gEMA=u6|B~d4-0+Sh0LQI zvOIMBc{{VNcT)&`o(3Y=%|p$n%~V^SAy^**j-oT2exZi40 zWk2+w$WtW_G=gt8TC8mK^qu!=b%`VQ<30`fLH-XHpyptUjL26h6A~LQWMQzhG)NS( zNTkR5WBO+nw4Z~SN;8v}9k)T05th}}#oZ$m7Z-z$-ET1Xy}g+DGp@?VIZ#HK+5!4kI`C|hoP&=aqZ1l)({MA%luDpp)>AilA!juQ$y2w0k~m0<5n}lwg1~WE7M$ zU}tArTien;FE1}ACno{|LYpE{iP-n=-;C@%{~&q5(}I!Vm2z^np{Px|DsG4)yGTf` zCcmZ*xYD1kt1Y%C`iB4Ef7$-B2WN`T8_6GYN!-|FKq+)qZAVdQdwkE;N!|YGOG*^wIh>{yk4{eY3BFN1g zm&wGBp8K%o(#3gsbwFWA2w3d0^4H5rmUKxRW(#$45DAh049fBp5%``{G}?66DP7vK z;h)3$*DxRwAz~6qDOtU?50#1t+>W{K(2%Fq?cD-lWH~7$DDC-vU1qfXw3k+XmO@{Y zc_6vAfNXB1L2! zya@A+7#3QaelQd=uFkJ_h->7=vjQrH`JAfQZazuolTUMPDLI$j$K2i6|NAjrmA-xZ2BHTdiN4}rBYo>zSyAKR$-c!8&C|8o z={u*wOw;+hISol=X;BeIH%+$$8H>i!o05Rw{)~u$TeGoVzF)KtC*-gAL%Me%@IF@f z^F4h3tu~l~{R0V`>bS4#xytIb$ydagIVEnD!&~6SHkF)Ddz&e| z(ho_dYcHD}yoa(hahd}u6;Rtlen7F))a2yF#RZGw87A*wPY)j-U#>*y6mq!hO5F^+A?8Mb~ zsJv0e%7lEl{~faz9xf68!R!YawV8baMn>co6p+f|N>52a{{RHf%;;zt6&01WcRP#W zpLg4y`sqT8)amwzM~2jgp^?I=QMe)H>o0wT1bxs5B+b zv<}>%g6}bqxa#8SNQG!)>@9s7cLU2!M$Fb>h8k9wLP5XYZ@1Y%4RC?_p(!U>F)&iK z(!qB8X(h79k`65NEX7diPmZi`Er(loR< z7de9CXM0E)mmvp#t1G6@0)PBMtArF1DT8U2wj+iJ5C2(jpQrXF&+P89T+SDWeF3xj zdmsYVUa-lhNRTsBLrO|>kNu{;F*eu-Ad?c$YpcxI6Oie+p|4#mVX^p|E$U~E|85>+ znKBo+3kWm-9~wNX^p~j4-DspSrgG5)(@%r-VO#2#hV6ESNON5mNh>%DHO{17FPOxM zK?rc1wT_c7YU(Q%N8XHL>0(Kw}|8W(#ce4UEP(ene*@!JXwPqU)J-zSVCzuij z#t#D=@Ivqw7njfz209=XJ-pxAnJBinq-2;)B1str1R|x}Wgvx~6`CRaU&OML`7cua z_dBmUFXoT+tWnMxj|eE!EJ}~t&ru&Mv43PzfaiZ8@c+Q$e-poeZ^*cU-Yb8IibSxX zAp!&GVE|ytN{>S7NcgqjWaA73vR;Y=ZlDM>2#-04yb(HdIw{$jT0e?bPY;@^ej(Sk zS)`y;K_Wv7>8Sr?Jz4*y#e@f>3;uel7|jj<)9^nG@qgsMRq*PB9x5yLH$3|%#RYYm zX>du~EH${e5iUrDxIIvC9#az2l29OSA%6^{EM)#8Tv*ERZ0#X4qtLrLC>4a?Dsp|N z(heiGl*zV6ewvj_i6MiYon+{;KF}UA0zGetd^wzP%t%FhNBklwG5ePcirfCC=VS6Y zLsFp?AZok3n;8k$7|nsgwt#Y_545qHWZ=~h6H&KY&9I<^Y7$SMHJOI6=-i;kO$;Iw z;Vrna916=k5^%#8IOyOhaE|iG@OX=+Bw*4cXAEB@?z% z+J>ix1Akex7cCyeU)*LcFX*4aOWTU_8t7qT(wl@V6x2Y6Ik3e5t43?r2%FH9HWlHJ0##Eb24a5kLFGha_0b`rgERZBEM!fJ%T z@sg0x*r;+6nA%j!}I+1K6_v1yw_QO&6SzVnn~75 zX5HV(F&$*Wdr_ha!zGk&;rG)BeA8_OP)czbQj+&?Vd#YxVaYez0TNX|Ky z*e(WMYQFsw-QR6Ds}p@bAc9|iqIW$(kF@M%6wKJgt{Gy;Y5s|e|C`0*;(NHaC1#D7 z(??bE!q;m@v<=0W(1IynfE07KD|h?`CXX?TE@wyw-RUuG8XSkts&z)Pc|2o_-ADh=vYifQeZ!1`T zXAbGt;8y^E1>Q`!A2Hr9g>OctF5d#~(VigL?K<~{)fzjk2AqUyplUd7D{}ASiohc3 zI!$FP#kU~xFnm4!!2U%VLB(>W0T*}yE--o18q!@>X<~-OJGywq zbJX)V8x14G6Tm0`RaE0C`|LuozudmD!4T?5X1kP_Na0a9Pwffg!Oi0f$tj6(Vflw4X_FATFBKtA z7un7MpP-3vM+_Sc(QXZ#bE8^(6J%_xLnoCHoE8MNLDS!?+N9(>`XNIOR;w 
z!e2~$$K=dgUv64Wpy1XE=F>jS!9H z-4t1IK9qreZe9OCV~{O8_21xY4VS@acFj*Ju*q=L!->Pt_uf;IoM|;rHtE&(iLfYffzRQT2My^mP0fzw*5|0ph3?9!wNjf{UCR4d+gQfLa!7Cx38OB-mV7*<2Uc9aTy$8#YV9&32(kF|!f>epYl4!gNYb?49%yWDZ3nZmB!9Q3^Fm!{lCx3A=BE_pO# zEp0a@D&JC!WeU0-FYiTmYdF@N_SwKEZ#AAhzsXHcLQR%m76F~?Qd+`@nfAW05I1L) zuGiZPXJuMjL|5<41wF*-U&JPo#d1e<006{FNri#1uvgPCY)Bm;DFED94Kgx=aOPcM zMu#G#HX7RFk>}Bkw)bu)%!jMPol?-g;)3 znQDw}K_9ycQ?9u%Oq>LQ_ z^jpm;Rlest0RcPp7-AsU`YnU>k=C%)j{_bin*IBV z2B2Y`q=AQ1Nx8JJfg0!%MG`h5K@V?#burj42$aMSn3xzVo^v8fEvrJ(;RfcuIIG=h zULwB1IQasqr0yD9KB1`XSR>3xxjIMaAhqpy{_HI*pttp{Tf=Djua;8szM9U<{_mEq zwIB-Di;nEp*`zu5kK(2Dqfa<~BF-x6sdfD&Z}LzV9Sy5tKXIda$SG<{u@G|590r+L z)*j3>{T3KceMz|uImOqJF9FebUy=ZrS_2evaIStg6T+OEn z8Ak*T@6rgPS{|K9m_qCRV`9xtZ1NS?EB2Nj%w{$^>K-I5$9I_u&$BFe7_DwFtWY!d zmHe#)%4}&rWrLc}rxm4h&vwB!s}Hd0nWPQtUy43`T`rt0LJ8=*!oHvK`Shiy-!a_T zApj#!oAqJGbVa>uFaI)#Kne2uZfxe=Q}cZA6S>V<7J6Cj@$$-&E&+Lu=iSj}Y{6Uu zGR;PVQGUx_Q{c~HSTnKfL=;rWCQCSZk8gLq`|ke&Qn?0w=Z8q(B$2h0#_wqx7i7tY zyQZD-`y_xzgV7l0S2N%D*)6uVgAD{9plO(rE4_ZJD2Y7wj!_q#uWKLYqi@tV0gcz8 zLH;#E5!Nrr^?9?=r0};Rrz%~TudzDohTE>*f*;30JWovDl20It#@f))8|(D4Dj3$> zn8_CumWD3U!atV5txzu zc}!@%mqt(|t>WymI&R2Br#Y;%_x`@6(`i=<#F7tu7&Q=HsaB!>A6T&>38L2JQo~v6 zUQhE+%KHGsmpBQB_cx=o7Vp($T{VoB!iSB(zu>P+o9-RN4JR9@S@vqK2S!fZOdM2f zHCmYsWl^DX8C>wp7-85OGi3SEf30h&>Okq_J&S*RJ4OHG*QZvVPP;Z@P<_W1n-RBK zd{D>Gv4q|T?)$&1hB#2d*wuyCp!#vSl(wANlT(L?-KhlOrf=``hXk-V6ysoQ`ejyu z=D=BAfE*~OT%XotVv{`V3YUw{YjMXkLjh9^xNuq>^DjzYEgNKuo8BP`yrj_D&7yM;?Z2TD~bIrKQDZwW(c0a;3pBBZ6k`oD=Y1b4{r`C52@U!|QE3VSwU_Q;V&EZoS z;^Shu*MDc&|wDQlckT;B-=Unt?PVJWM)L(TY+kE2YHg%aJ!*?q8&)3 zghWqiB5Ltz=YfjV%1Qm{y@dIV!CBmyco-{Osh zLMLip@wcgB`<49bfyov9=u%GOlZN5&hlt#IdgOojL4Hxe;SEL*R5kO9GMq*I<~flf z`{47giatVr+hXN*^6fEWgL>>eeehQT`eD@tjox0|3iDbkwz3~7DIe3x1X)8xT)49# zOS$lzqM~HvR7PM!eDLjfsq}%T6LCkqDkf><^mibpiYk`mjff8mmCAIfydo&_M_}gr ztH)srGHX=QN9=Llzq=pU%YLT^4~E4!%_9pZWVe+i658ZX$2ua@V%95R+!U7KF@))~J5#F(pTUx$3H~WP7^OhZj{Iiq%qDQlbP1vuvw}mTUSgqX?V-|-qsmQoc z$EOOGUtVV+(0mn`D!O^^XZ*%rj(lEto48zxZ?F>*acMTrkq~BL9SXxC`afTB4Px}6 zu)emVjsxq`!82T@lpi?&OS_^fI~asVLr2}Kl!SxFZTFrOx2GqAM>VnqH(Uxw}4RZS6|9R9d`< z<3;bv&s^zlugKaB zVpnJQ*BHx~Z`AZSaDg^bXjvp;x&?k9?l1#MK}-j$p8!QbDk-@^2>ovtMHipS@+BL% z;V90D<-=}P-Y~|ITe>&zoJOSdnFBi_dj^z&hEP!+!-^yHI|hW1S-|V5>b#|G_ZhX8 z`B^bA%G^y=NG-@ybFw~O^%|J_!y%#B%{RX>+6}Qe8 zoBXg5qp0copcy19Bm1}YeEo7Xy4!u_7nK+SJ~n=@r4%Qn9mI0856Ail4JqPKzj znVWhF{VqzMjFHPm@-~oYp7KVK+jiKyya6bgwC!El)05tSGLB>{21hxs?;eZeWfOJ0 zBR(yhyi)#XM)EW9J-2;lseMh>n+?cZ;Qax433twGpE?hI9;Yt9J_}T0suv%|5^>ijHS&3=C=k6ThBP|2I0J2Sv7W zC6Xb~(ITlUsK~K45}F(tJ+zD*J*W~AwhNzuehxhwNx%gw$ zV|sJ`WaK?#w>y_6Mau&sAoUqU-go~SJ&+b8G%?-H4( z_4oWtlM5Q3XhwQBZ10l-nX4~@kn-bos}n$^gTrvw2&6bt>#4?(e91P$|1Y${^XJ7~ zwd8nP^tp|6&1#!@r9NMMX3{2HCZ-e9ieF zR`R4>r@aLphVP2=Ln(1ki8rET?oGSGkxk|RHOSM%K}Oojs|)iH#rmqp24z?i|Bz+ySqv| zdRDv94a-LsYz$wlW1GT)-LiW{c(c?-f5Fqa-BOe*chm>t3%RZQou1x3l!=B~4cC<| zM~~;3Y|ZtYxZ!PDtDOlnI0{~R3-%Xy7TD{!3Mjc56hzY$9xHP>&#DHOXdfhHllgcS zpa3~e|7-A8UfGg9z}5#thSvI}S%05ovl*47#(ij^+Mu-kEAX;;wN#XRg2`gcjc>iNtC!g?DP4r} z;B4^J6pv%8O$HpMpCJXSrhtS{Snf7S#uPgTooY?bogxDi|N+ zYE{3Xm76b|UZAxyG1U*E3R=Kpi9?^}aDL`?s(DW6l+tVT7YIJhEan6vZP=;@l+?3| zYBd@$5S-L6>@{8oPtFi-zw$d#p|8MvCC>7o0Oih}#>s~wI?HnJLhy}Q^9+_$o3!nOn{(StI8Mg#i` zV|-2KTQDZ1_E%O}2rN~3l^;bGIgVJudo0D`hjd#K} z%Zc1OM4mcn4XrwY=CdP5nz^02>%sP1TkUzxB^h}#v-y?NWE2e9AqjC&dw&ay%|Dab zQQUDleHa)Tl8d9DWcAmdoa;=B-7MIPya6c=Ae5u z+$$>!5`NN;m(et2;dOG<3C?b*;$GK)Tb+F=72cvv9}DxMakqYeWf6Uxc=sl=ki7o} zj)c)1alk4yy1ME^EFkFY;L@e~{`~UqlpuAzI{Pf?JAW1!*6*d^Zjx}e?cOmQyqC-O zu)!%@nD22{p2ym7peu37?wb+R?`v^w#aj1U)9RjS^kHgKyIc|du9MT4`mPlvFJn2M 
zj0wbK2J8k2ZxBu<{+64dp>OF31q^!xh=xoVV0oMRO*E@2RHW}wfsx|8I!l6_(Ct27 z>-)KvimFM}&#ONmnI1Q7j8XRneuC#^;TuVsI1;?*2}(3rIc*SmIuE9|RfL+VLK!D< zDk`dkhR!yxn}}eS1%t zexK8x+k47Aal3pT81HP9TAW&g;Lk!g?{j)6KHlE&rCQryw4K2d&s1!~330qs>10kp zBW-yz{0>rnaleeEgzO^whA@#lF9+gwd^b+rI?c+=Jm~q`u_^QyM&uA}t~s^?dwzij zWqpOEW#@s~g?>b$1OX0`{ztt}D-)i^x9P@c^>*io$@Ov_t2)9Xa!Rrp*;2XZ@+I2t zEd1%W!obI5_`Vx;EY2^dIWg9-$R<3foGMrw`G3`YWnIcutbu+EEX<>A+>kGNLsl*d zM)6CVLj%yi${M9ZAN7VCl|>Np;P3D!S*v3OtpXw@ufc#KKn|rGOGIo@I&6p8Q$2Sp zK6d{DS%3YUzD$)M{V884lmlz^eYzLM2O4(RxlH^@OiDVBvVhX^kr5FXfWJyHDJdmI zMRWVc|1E;MKt=HM)9N4P#ce7 z8@w0!Kj+6NXb$V*Im1#C;^5Z7u6Q2@7CYsV{70YPbtj;)pV1`^L;B+f|M)7!zcwh| z4h4?^fF%a5eaioVx07=r8P}++lz)9aXvy9?9R45CeFDjZKMt-K8Tz72HBwZ>mM;Wd zwQJ>-Kd+pK6%$aUdIkERR6I-atM(6V|LGmN^%@8Gr~Nm{u922D@rs{;5dJ{`UU&X~ zU*?%fe zu(GfD2ZygAXIZ25Lym$_epUW_danzqP2A9Xkym2ztB$FWRut?&7vppUNJ%JO@x#C0 z(rNtv9q{+i$E5iOG2EA(vj+_*PNsP<3vamocoXQR;cH(Ca%cCgw$LuDw=w=*WGczr zQW2D?q3Lu#+UVx7yso#MFByd8txnwSNSGZ`VXj1*eU%)?sPp~`?>`X1MhP@uG~=pV zYCn;THBe#S;sfhQKSmNS0oQ?y@%x>Mftnr@Ct_4n*gF_1ZW~$~pA2aSWT|U-8~5@x z9`)=#ZX6)f4rW+9wbnbKx6ky){1&>mxkvIvmzupVKfLf#nlg6)k6n4Vyz)ziI<5v& z8!0!i`X8qD1gP(rC8@*X>?Bu}N{^>pypv<^5AD2lT=2hDo!8xh9ZNOXf?h}PX$!(W zDk-Py$Spl5ew8qe59dM=kKJ$sk(qvJ0AC^!{O_uQG|Bb>`i?lgiQDc+hALi6xel}B z^Tpd>5)?>_%Pf;tu7YiFB5S-gsXeAjX+7R?K+!_y`ANi-{ zZE8UyE!so5U$kjyQBsIirYr_KL&Oa@K5!aAwzX+|I;pz8PT}jcy(o#|%tYTf|cgOB( zX0`(6sPD)PtEl@`b+n5cJ9sr;@IN)uZ_S$Bp}GHsrcT~7Fw1E?Z1sW8d@gYd%@R=I zZ-ayx%S7AM|6j==+V49|e$__if_~K%6jY?0{TMv3FPeAIO_3fF)rzRO{kyutMeNpU z|3fSUfJIJ#kHLpP^3Ft|^!6`@M2!lX$;jBap0? z6GE-UcwmM2bijQ}P2G9dTVPfYu-5@BY5i{M3-VfXjE(CaK#cEeS}a9&-a@--i+U7= z{3cnfIlP1~9_u%&#&{jo6Wr?mq^B6K=e@;yvI!i@7V^{xTJqX8Y&I-zAj^5w2hFAdx5>J9CMTrb>DDqY0wYId*1lGi$K9KAX;tnooZ zt&4(-mQ#bQP$6)(KX~34nM)cL4v3C=UN05xSJ7(=m>*(Ll$YE#M0dDeeN;Fe?O5Zqd8e=*kfG0o%VNFY$;J=OM6_x3FS1Q`)X zomAhu=&NVc^(z{FPKj1hFAYAZ8MchfTOZk-sGi%$dh1OgC%pW2yd9XGu8CRe7|>Q) z)Jh4|sT-|Fx zSy*nxz`>`zKB)r1zYjx`jj(1nJAp6&gW>|6|BU_q@Ky|3e)`$%k6TD$)h{}-Nb)2R<|P`cVuiIP&hz2F!#r%nO_Yw3fclgs>Y?jmqnb(L za(l#9d2dwJk4{Df9SQiLIEadV#D*?7KQM&1Z~69G%YBdA$af9MV2yFObWP|Duz_DX z)o;aqN;9*Nl00Np%HE#vT1vZ^ML)tB$<)_BfVVY$11;FH*WeHfgaXe6zjBG-~&(R3YKf;(8pCJoSTW1^#5sxE#^Xb94vYum3F8(o91mQ|%hwNNCEeGB-b z^K5(OalM8rl_Z7Q6AN(WUJNI3se}V9-o&EDGV{^@*k9MpwS`+_II-{wQdk2MSWYqABa~xTNeVN*GU^JtOUmL`rG%% z>v)Y<<#Rgazr5y`ppcU8a@e}WCx^N)WLhuQMasc82w4tC97e`Mdq?s3G*2`Cj_?kt z-I&)&GcVQhn-3}bEdKq`lct+1OSMMYGC6u;T|)3Q6Ot;^Ae052Ip3L)Ei{fn3tk}E ze!Tj7GXANM5>MBwnaD1dm-LsmSQ{7f!&T8$zcLS=E+7;Akh{D2RfahCv1s&-m1}#F z(e{Q3Mzx2FyK^JAa2<@SV1r&B_yy>0LpW zGpXU4a=(g{#YUOEZ0!6?SsfdA&&yNIWn8x>GT$Hg$bbN#v0cHir|U@p0Dx3Qiso=W zm&scfS(disN|1Q*yGg~k7P?q zy+mjx_~BR<#I6iAbK7DB14jtteYPE6*JY)RDj&V-d+hlg;Ow}b8ea`O4;!|o3;G30 z)dh-V1syI)1%4cj&p-Gq{__44G{{ZKKE~J1p}NVF$H?G8=yPqZYVIq`{TtP%19{HR z<9NAp$0PAWKl?5YM~}WR<9)Qg_OV)rHBbTYD+veE9A6eeQa0Ng>yk2}3Ki}yWNhqq&CkywvPt%6KA zp{fATZs%YnFUNTq0RtSGi0{gwdg??;>bk)1GXnqhV2eUh9pN`0PcL_;PV)gM`K#0I z2RR4;_0P-4hO8Z>Ig622mMeBo0#T&Ggd`*@k5|VlE%30gMxb99R`H8+$ImA=*Yk>H z2>gokoz|MH=#%7c)V)wH&6HXlp3l7TrKR>(Zu9DZe{}!eeM{tE@ zpU;|Ta1tXvpgmFO{!4CfL8IT1Mzvc2xm8=Ls=CVl9&_$MeMYJ7YBQqtJ8f}3qpGS3 z4iJe(+IN|aPCEOei5nJm031!Y*X$`Za%J%=3RFp_eNEZ@0SA?YWNBY{kjcl-GkcT* zRbx{o&(~_vAipoiTnKyE)q3Y}lQd}j0va;f!FvWhn>{JG|^(Wo)d zq^NhGd6P{kfT;v#7V(23?vcK=HUkAG5ig1yA0Um%>Q0x)C}nT_GW(h+EsJ{~ZD7@eX% zq+f!ps)0ESE7`SdoO{}VRl$K+lOFO36v>-XXLLB<=Iw7jZ`U|W+R4G$F7ft6z1u29 zoYabvDpz{{b+QD*$W>0D@WA#1m<}t8Tz->hoq=K4{Tehk!P+AmsUlCW-b>P)QMh8Q zY$&{#w*H;}bG*C@H<0yE_*!DI&Fgol_uzEOF#HX&bkx}_xuo>)P?#`%SzSbNHtnv!hbEj$6pK>Ptr59l;?B4k;|AxY25Ww~SG)CCMN 
zl?;v0VQpHNU;Jczr=k5m(B_?ZeIN7VW{Y^}4^=>#(kU^hrF=L+B)ZLe9gxeFTTGs> zvcy{RSR^@_TKQh56rd1)obQ>&{xe3UY`hq;A=|Zia<}YY1c|)PwpFVYz(!dxa$G{n#^Di*LW|lNINDH_krhf%BnKDb3zuoFY zK{9@tO{?f@o}vK<3dg!&WCpVKBSB2JC^i0;U#N91ml3whkA1f8d7|slPJXD<)IXIk zkoSsWyst0C-vIREFdkyQLAtflyoO{muSrnInfVnF)URjd(Jg~NbuYDzhD8QPoW>5e zVm&qOl?kZMk5)db<*_WILU3HRPm^HDbsEj@Z!14nShh#z?9M_nY*Sx)a<7qa5Y+N{ zPg+(EF~2x3rt`l2)om*v4V#vivORVkcA1{H9u2D07K^7pvJ)o$+j-T*%Y{H{7+rZv z!u8|f15Fjq55JeCQPnm(VW#6CFYUDXY7#PSiV=|S7W=~GO=H172~Brtq?rnPw0`04 z5?}q%sJNcqwwh*1OSWOWoq(O(vGe!(Xt)C6i6>@(mkWc)faQ$lrY&#*Pe7a>FN^305_Y;4RSi{s+O_gCdMNSl$P&Vi*s4j914t#-Mp84hlhZ&;zbwp5x0 zB_`%p-@e+|73k=y$kOel5bk~3Bh=Nk;%zq2Ipcc%RH%KEV;8kegIDGIGqWD~xTm86 z7oS<5z6Lror090P=%nr*?58&zY}?U|4B?#hfNs`Gs{w#7a%KC_pwGULr1^05AQ6G( z`Lvo7J-}DWibpMLL}nE4EdqyHZOXmW5=xk%nwtJ=Q)4U;?LICc(n>=!u)E$aN@8hy z>w_wH0`XaVJY~)ER*{qyu7iexlJcvdRgiP5C=6eu73qi;zN8YSE_hyjpRWWKCbnEz zlOs@Yan1DgZLcu$@bEy7gqft7gIS-&4Tzzu@jL=KA2F@-NQxECyFKrd_oG9y77tVv zny&^7ilDizaa3>Ubk-+N^;rsrs7oK0-y*3WZ2xvPGcb7HskRZjE;^gmef(gl9$zAZ z%MxtLudrJ0nq%%?$tR^(T9(U*gj9xr77Z5q97^rC&bEYOOPyNN#Q;Fy_8b|3+&-gL zzWnT6#EV0oZI^fb?3fg<@z*k4C(W9^^+-!>*4_cVcTF1orS{L_wz)GCWX*5IS@uVg zz2^rU9Ndt!G#x-0TSp9qOFh@BMK4lXnw+z@FF9<`x(M~JR)ZE6=&fBMA~~Bb!LsVQ znlJzS_&J|xZMJU`TZxG2>EW+BC(WbK%}~X$4@Do#$=z3NFU|Nn06=k?3-AW;E26|q zG~fhu&Q_n3Rybsm_;XF#ygV*##NlwV+xvNYH$73@wo31_Bf=t4m#`?MKo!TAm#Ho1 z*{U|Z%_AHhl6}1!wJ4OM>o8|%_Is~Vgo8@NH+Rzs54F$lbZr{n<<9a>SobBw7aNW? ztY?Wjitn};bUcH9d`pANJ9`XfX!Tp42rT&CP?Z=S+~RL{Bx-?$146gyr~EPGd0(N> z;!hnM!xo?r!Osr-9@e?sCC4jp>=86E@|%J(Ut+fBOKQ&4_w(CA;0^Q3gRL{@A>=$8Znvjr7c9iAz8%iT$|_FTR19sdHJ~dI{BYBc0^mTd=9i-9jU1q zDRHJU(4VN89IGesXo_0PSyjU$wV9MMZ8dE)-;B6E*_d&2YGB+yG_9>NaUK9;l%?xMuzK?eK(elFMQkR^KVLF7N0pnR|&-6L`-9C7tF zya>`}XI9j#C>x_JZO!mTn%S>i4_jX55Yy1V>aH;s>zy5qbKIQ`w;^{R^bVco$5-l| z2UT428odR?Shc^T|Gn$KOs`H=_oA#>QwZx%%+1Edo6bT0NNtaX8YS~RyV==LiILlU zukf@}UgTPfo0=!p64DVpSoQ?1+OLgIs(rm|eo%g>Sxv|A95~rq%pi>KF@%Zq+4tvX zRP&NQYUJ9VA!N^-66K#%D~&1kIkcd^;&pJ)G2Y!Y5YH<8jkrb1&4BY%zd;NaKM|l+ zZ=}hh(Be~`x3YU;Tb8vPB2v!}dW*w+AE63$fE<#8TA3U%(Cb30{Jg~&(roZdwii)6 z$<;{8s5G6@hy>(Ve&AjXh7sscMd*j->3smmN_6NldNoDA7^VK)jjD0KBmg$1DaZKG& zUFM-*H9aOlN&vUoE#0Eb1t8Dws%*d8bCa%3SwQR9~w7yq|NeXt7y1$xWX8z8-C)aka(ztcsGqxm{Qo{vh;@cknP z(=TgmqO>GD8fkZNv((;k#S02QvVU9k+%A9QB{4eh)d#bx9f#9;42#z~!=|7a%Re`< z?efwVxkn0+6A`91wabHB!0pBF$5R-SH8h7~+g=*LvVURUfybnK9Xlh>#BFE>IdSa@ zvQJ9DZMa(u&hL4TCfAuHP>R)U8Q(niI57b?pzZ9Q8T73Px?ZQ?AT?3+n70QToX z5YB5Z=z~b5a|fM+%#7_|@C{n4Urho zwzeEpgmWO}`?EXmL*yq5vR&{Bq?XsOZXB*Nd_kFS?caOKj~2T?o4GnPh3gQOng6n9 z&){N*4hZmkIk_Jp2A#~CS!umr;f3989oFP9XfI1(XXmw2ChRV8OCeXpfN8pXzcI6= z82V#WBkpw=``%Zmm+L zA5}fL)g<@UY1J!8P=L$sx9E&*GUL4s&!)Z`G7`3wK9W?r(?#(eiJ4kYj$q zD5=OYv^sWa4lnE`YsE)d-*PvaWA?JKAZ!}$;w(ny>e`s85FcI#41HYt=UY6 z{%}|KGX=w?w+RoA)McJOOeIgxA4R56Do?q+-_xhxL*WBH3{3~u?Xa%a`>q<0&v#ib z&q+G@Ll=ok-rq|-)B~LUWbZ6)ua%?Q>3Zi#&;gB>>|%6bQP0=ghh$0QR}SN?cM3HJ z_F7BWL`42cyDGzWMLC{e6k>y3&LzNQ!S)pJ%PfpkVT%Qsv;7 zsgCR6A6;M0*@+&KX*RI0vy5 zErvnZ`_9_yla=^GE8^0icYwrncgjzzV&X-Iy`$EaLs8v|W&prK(wJs4MLtU`Q#pn{ z8~|g8hs4I7xY_wO<4?DocoqX|iF%C-Ta0_6>fqAk=2Ypf`&2=6J;$UFRxDUX8&f~U znb$wJv}&MaY>c6JYi5g(dL?Y`IEoidf~-Z%J#8pisM%LIen((R?~pY(gs#%Dgrm%A zh#TF$U)U`5yIG*523OtjJrntOQgOD3UxMXY8KVeKP*h~%`thwGM?)u^!Y?zsQcMz` z;JFjb<-_+~+173V{n(3p7Y6*H>6d1__~PTmeg#lAHI_<3oj9_Li!JK<8iW^HqhdkB zOhH_b5|+Kb?t3G}>u{fnIcjFFZwk!!mbEje*XgMK1Ul)kMWo(A1e?%sEgyb)4uOl8 zF4yKovS`bW*mucn%Re|YH;gHxr1Fv5G>;RZpkRpMKIFHM9_MG)cxvE#dS?6_ycauV z!&I~xjUvA4^Bg&IXY2QOAeHAcFV~wt^S@utcKjSon*_pwmcs5Uhr=S<)8E%HZO6Fm z#<-2cm@oNt*p6QO#`Sv=@(0>17-iT87F2wxzp+nN+8fL2u$`^w2oPe-9`w>;A-19N 
z(bQuEmTEY9j-Rh+o8^?)Zn3Z5R3i7eslx!oKKG9(OKNng$0L3$xw8*jEoZmhYd8ts zzF5?mGcfXFMn<2rN%93*7K+TW9&c0LbN?MMJaDcA@QMY4ev*Xmy?=n*X9|_@LT3&FHNgp$TyQz4k zB&{0);PahAFP<^TlhmI%E;o|Z>R)iK;l+E4A|}#bYzzm-u1)-vB9mrMX(sur=1_Y6M9*7I zu{wcEvF%Bai2XzDWVZ6S(VL1AYZdtQ+wWFIByd;Pb7sGqz*K3;Qk!~~1PrAITaZew z{kg#vtveX`&#~pUG>|k#rT|cU@^tvcynX9;f; zH7deCvgAr5+sF+ADNV|g)xdzyy(1|}Wv=@4{am+r`kRkF0|eH+6BG%=AU7p`$EVBZ z78Em|@ejJydvc6!j99`1^76X3Z`L+9P2u6;59tHi5ko^m`}+Er?Oab*g`v5LvG(+g zjPkdCBcIX@kq0kJP#MQdN5Vxg&Ch@$Tc7Zc5o_+-zfV5Zj!q@M?C+jmUMAE@cCye3 zyq#qQ$V@!HndzVSINpXQhanNO)#R#^siQ@f*j|}E&AMC}eS@-z`4Yua5&F>Cx%07q zfL8GSd&tY&Zmz-8{sx<&@5;u-#+B}~bzTk7X8CN&sEZF+G+#paoF#QJt|21yD4ntG zwvqO@)ZgC&O(boY?}dCWvCWMf4ZEKbKOn**A_5y*O_-*|rlk!Thqmwc?p&avqEaUZ zX+h^^q>Mlyxs2XQ?aBFrw?=vv=S@}G*PrkhGbuWBC4oHNKLK6+Irh#&j0^aW7%V*c|U-Z8}u8R2_pgH2XO?8`n0^!a4- z8tE+&MbCUY3+f}F?As11;P0d|caNj*MY7Q%*$!fh`!en83eRA~dgeNnr92VvgjnUz zTGW7;OL&xEGM3_}Q-~!nWy`2U!!s@5k_*wY7d`-(uQ`Iy(XUl|lkQ~GtzPaodxKNT zYkbx9?rFTIZL?*e`kB-Jhp&_9b*awol81rnlNE1b@@2)B(LT9->|OOv(f${Z%7=Wr zN%oTH$S&Fp(ng**AYc58tC%gaZ9)UIZb+T-L3^YlJAv2>4Os`&kPk7xJh1nxOj&b_ zvJ5A|OLJ|V*w2_w)4#{kyXiMu`2K4e%ut^{RdU&yq9VA5H51I2PCZxvo7SgY$c6rC zp>@w^X#{dARBGx%Eug{d+?=u2hkyw`wa=fU;^L<8t8h;AKSB57>g%sS>u$BUolJ2> zw*ABqd*O!pM90W@u)l8%jg5zFu#W8{&55Ba)u_g|Fg;VdYaeG z*HHSu-h(Ms{~7@X{P#Zc*PrEo4}&iH{@pBM{PzX&fW7~CBJM&WLiM3$hBAcE$*_P3 z-AznP{LeXN@H{*{jcH$RKpX1OhhK$>@Bg3vm+1cc+^_vk_`OMNrJPuYt|I@w^7y1u8bq#gRB&~Bty4SP5o~Ip+ z;br@H6yDAZ>1E^f5F?R0J*)e|)CLq{YUq`!O8T%|x*j$Jqk3 zY{722NdXWZK);u2dmxYNh9V}@I&^!L>(fd+dc~#gu@BPB3}O<o=;{ojx-e0eD^Q_Pp9q?3m{&td*(ZWa8cgaeN3Ot#D@iptRr+jc< zO5ZIhFS8;PbS0}@7qg0Nilk0>j5Xr21b(>KUvgYIMCF=wsSrTb$GB@PYbWcG1~UOZ ztz~_5sb5X!D8U=Tfi$W=j~YrpEbW$-TkYz$pnkzMe5aiFdjUL9cViz7!^Z@b>3vv) zaBmK$X_;m(9c`!Lpghm%|^~yPxdC8-;lJ{wyvDgueoy2 zs|fN$op~9!m_gXyq=;;DhLrPL z#T&HQBXv$t$VrA|UgY`t+y#tC_R@U$G3jcAG1%8N0hVArKV^-_{vlRNyq%Z!4S+zZ zjaG?7E8A!y5b%fXklRux1$r{y$B*SAE6KqxJVlVOs8Jp~5d-on)7vICLM#z5X|>>} zvDD=tG(Ggq*SZeNoSkTS-X6HBeHi0>7kfN_{z4juo;J`u%IY3&vQPM*8zl9!@)6j z&2xg42zvOk^tKiHI}u0PsrePVzMdkRPyrVoPwi29xUDXUSLoIhx_a_t;@zo7`Z*R1zrE1HqN2FCxS|2g zc|*_NueNp!yh;8LLoEf^yGK9HeUTtmMM8V5&97M9u1-S5LLt}ZDlQ^|+rH|QVgQZP zJflPP-zajHVi2zX^mGcWqrKDL+|2%^pqq+De1Y>5CQG#l1Usb8Kb{ox_Ovfa7?jlH zZj5CBJ>oOq#t?>wlMHET?~h9tG2>Nd5+mvS3v;~#wPRD=zb{1yxu_FfgUhR9JnyE4 z6Kn9s?^9xOvtrggo5hV$(a;g)y+`Ij0k3oQGVk{isI;owWzx!h8MJ7C*R{<)rQbGt zBBT3f&AJ;$&60sc9Rt zcl^@FpZOg>Z?9cxUI9EA7$VXI>p8issvBRxqjDCecwu9s&L|^~Y>L4z_t7yiBL*XdVaTU%4_l~D<={N|zw;J2M$JPQrQL(%m7rd*$dl*mPi;+= zS&n9%^vZjJcJ-YBxW+j&nsNx5IIn$^C{YX!Gp;O>D@&62O85lLfdU4NC2=gLTb(!+ zN_3`TRIg?{+M5o4KUX1PG=hvh-M?hxO2*^YJK-7W{Idk+0(~2+FR$;?@>kxcG`py*f97&qg}Y;a-yE?YBRhj|bZ{D+db_cro_1S2It z`<7iin{j~S=wP{?u1WQE2bbd>Zl#BlFossnUnTG9kKa+|)yo%wW}BOK0(unEGBPv{ zQK_j1D=VLxUS&Cm8XIR5n;1LP zLBR!i{FSyRKd!V3qQ^I?ZF)R!PD(;S^aM*8Mzjb1rfdvo4RavR`cN5 zU&#^6+T|qc@^=;#`2Q^;FhJAOyS{DD2`gqbsLss)`1StrLY0n8v&yok%=k72qaeB1 zda=ZyVY{fmS%d!hUB$}{iz1>w!3bBPJ{~FI@wD4?l4x-sC$*PEW~0@k+=%eVVl|(2 zNTXkcus({ghqz>Fe@WXA8ssW0OVX#RQq{-u2VcciyzHLNKI-0-xv<@keH#tR!uyeW z0yI66N1eg~`1G-`u;}osGoV!pY+YBET zx2JEDGGKRo1!Z6R+Zut;C-BVr%4t$EJ$(7AfQI2a>Yst)dV>QDanRICp~-Y5DDC!C z+5`}U^3L0RE-th>ZDZb*xNuwnm$%~#W)r9)Y>TDrHB;{}Yj<$De?EPA7yMj>nAnSU z#POhg3EE=1|FpOHkuX@o&NXHaz}Jcz@k^NcPgn{77;4RGzQt_zyw%=u=z)*I6A-A{ ziWs8zkPEuNC>hXr#+ojG&u~3U#3rOwMh!(J4QtgD-zh6jiL!vABR+5V@d**2_~Ve@ z+7+J=07&F$FKdDhZZZxBkRmJFLz!<>qd-L>vzgn%@a3drtq7asU^DcMdcEuXa%z>F zNi?IUwjm(sao^OUEPK&C;#UJ4@=so8X4;ty0h{C-mUr}+vlW<%dge5bkiigrK!*-a9 zz-4dAIXfgK=faZ(^#MZmpat+V zHI$0ABYHjAGuTeZ*VCymA}&8WjAW*h(RSulWkjGSd849gAK-QGSAC)E9Fz#=g3@i0 
zx7&`&W_qsA`R_JN`5)ueL5-Q*n}fe1FA+(2y~@9B_d?c)7gH#}Q&SR1l_+pSP{Qr6 z!J-VuxBXf6K@869XS%*kM>57XA1<2{g{*L7q3Vh_&uiHxz7sqX!OHFTD6mOB|I*7n zt(#sruz@C|tquvXU|ck9>{wd65!;A%grZE;@cL zzN2FT4>_w_(|x$EbV-3sU>&K};z^v}=`WQo`PLwhwP8JaGNG>$?}sLWM*^%x!ok#V zg7m)tZT8di)}7LDqv{T(xT0)r7X(CsiVA<07WmYX$v3rqJ#|6JRhcKghptnhi7rlo z%PQNl+>u3n;led$ayxJ57&R;w*gmr~KDe~43^h)-uj9#%8#jA7ns%gfI9csW%yaWa zDSxO(KoU$g=RI+K*pWvh3k;#j#h zgwSyznF-NNo8U3k$&wpQG@!K_53IPk-J=G4k`1O$$#*q;8cAY`nnxEDB-c(1hQ`!2~Y$Wa@3B*gN z132GoHBDk<^gZvT7OopyHMw>q%#v)^$si?AW53?{qzTREW56(qQpFwc7+kgDHq9Zl$}G+D!fP#0fI&gIhRFMBpOv>$v+t=26O&S~vB!k-56R!;aSC<-YG z<lsYr5YztQM>%P+nT8Cs(`hAm&v)Xay zpli3eModFGPow~{DPzQVm+DlXBH3KcM7;emzuM%KC`bg_U15`*`U_~jPgwpA8f0aa z@}xX*7X)ZDj7%Jvx^;Z3M$V2%!YU)J#1q=IF&dMrgpV`fa5B!Hd z4~SguA49Py>!bOhekPZLHMNO##!V&QRog=UgvN5fGpC~1V1<6OgN3;&!24@kVCNBM zDB&30c6|)er?H3p&+zWd5*BeoFy?mx<$tOTgSQtt9;LZc{3xumO70dgQ8S(Z ziq_|QAZ-s^M&s5+K!29ri&%ay-rKC71A!>NY8L9N&E`Fvl$oz`|%hs|0i)E3cLNQnQ2f*_|69b zyZY<9yumr{TL1tprj_G?r|*-k;#Z?96ct*6$oEKA`H5k^T((mgt1PmBeuamki&QAS z!Tp+TUnGIoy{5Hh)k)vD*Mdr^dCx~yD^^T^k;shU~gW`ZetA|Os}9~!qY8S}4V5*DG@x4B~mAF3ZHrbzCe`tKQJ5v77~g_fxE zzwZnB>Jk*J+2<*d<(nQZB?b^hONTebN!9XDmtY$oAv4o@?M7w?OXTeBJY_?Ww{1&m zzSYqUhI2uBeQ&38dTXPqGZPg?vE z7*F2+qh8J+ev9rO>{(e=VIYTHAN#GJSCGo|2*Qnu`S3CqvWD_Xd0|S8d|ZV+0Iqx0 z1%A*%LibRKJx5{KR$sLFRsIMVxGI3=dHb-$krG33O7CuyC;sq<@cFGEbshJY;CjE= z4gb3ZL=5-%1YI{qZ-2!@w$X$%@6|h#CzENPX?4NgE+_!evC%P+W8x&+F)w^`ubsl8 zVn;N73^@Ls&q)9|knSy-EX9E72Gn}U_|rHv%Z;zjAsp`vM|@(12;YCjI34s$z+0YJWQC=u2V4 zYn@_kpSEwuSOJm9!|vQdq)XIy6Y5(u4JBBja=lINuIA}Jj>l?cZU=o$ReIcR>u+&k z=syg=x1O#}pe5ippQ)pxEm^>y`a>O+`=!+HhBkkvDg>u`<(dtc6@Ns<9hsv1$$r!V zeto%nN+Bkrp>u}*pz7p=B5qxBstsfh`a`N15f8Ltb~q_6RhQzG!^Oklw$IN{4`{8YCLk5C*Vu=QzaD@)Ze&A`%>(>oDow6wOFfIK%J04`T|W?R75Fi zMMLfMi7Dis_3?fpy6fYz4>isfK6iMM;^?Gm_Bs@m{;lm>LpXWkXB-e}{Go%Q0Ggi~ zUq1ET5%x${tKz19jSYe!`$8&1r&$S&3f^RI3)d)6hq-(?AlmsHzC?>HOg$nuCBG;9yc_5&4579r+C~Ysvz}xm0~KRc-9XA5Jmb$ z!Yv?~V|qqLYQ*xC=X)6Xd8>EfAF)^<@5jkOA$Ao@(k8ML3+G$A5ybeAE9@fP z-6T35Y~;(r@1M55&~)eex&-!qMkckau=@$E!9Y`W+_eh@Q^zNRi1K~8v}7UYLx1bt z2z78d3*ThNM1g_y-RG9uNQ~n!X1k?b6Xk`aj%WIU{i+`JkLjU-e5jIk5#hAwIlW?- zL_%*uRM#9O8R+(xth3aw5Ep%WaH# z3}Pqe%l_8)k%FO=`Td~ zyxrOT6EMfQbeACehI#GS6V}RFfnv&YgmZEUPHYYCpI2e{7p7)CJ#?rk7k6~~uf9;D zd?_z0%hR!>8t_^H z23KDHCiXKWMO7v*4l<3voZRyZ3z~)wEYYFfy5rT3r8=I=%H;+c{fp3Tsg?(dbkuyk z+u+^v(BlPM7+MJ%RN{cV%3kq(djo(tDK?bfT{mIB(g>VD@+lzW&YTbvc0bbA6jYjA zlkIZ;#%=%PDB-E2Ets&w`ry~7zkfa2VU(b?rNt|8 zqvZ>_r;%%x1Fi&*F^}zR(A#s+yTK8uV7WN+R#P{Wj178BS=gyC5*b0QyWvP|l*kd{ znVMF`Tnp7l@V_vi6aCAEyx+;TTdgj;#bmdiKXIFTasSV}WE3nONlDIUW0$FR+LRH% zlzDg?KnoA$fgoB5Fpy&vACp?Bc2tw(Q76zy@7MgdfQX$h?Le79qbYr*ls<)ma zqgt&?WMK!#*#)neq(JR^m9{}VI<6%eVf(XvA^J7ucUP0|WhkMIUml-bwnJ3pmY6tA ziO#X^cp_>R*1^WPda3a~O@#sP3Zv?(OHv zEe+tKPd zy!RGqc}6D4&v?u~jEa2f$&>QB<`!-547&NIj74u~xlDn=b4epzC|3}4T7Ch&e9?%f zzh1|ZOleMDmN8@C}Pen^?}9*R+%S^Q*}PiYgO`g z+*I>Jb#_y^0C3Ut^sEhoNzkCz;!W`$pZlS4!T4Hk;N**IsKA1N&qcdkC}u43CYS62 zx^qdUNIF2->;?+#_E3=<$jzp_UeLL)%S9z=j6$`{{5}CQj?Bh+?sXBAS zx$Tkk2s52z)g(l^kb;2Hx^GW{d$`pfyD>+&P@)Q|Re_l2=Ji`Dxnmw2V)0!hRT!kk z?Vj}pPwNeYjo3U+hnb-fvDgeB4?lVoWAQgjuA?kmnT>?aR(q~ZJ8OsMl-unscSS=3 z3rYj5k0C4><7tARg<>rbikG@j%vHJWyV!Au^Z`!y2m{K@#i5UFXDl zTFcF1$O4&ElW5{9fnChH+DhwAxVZD3+S1uIHL7QLeSLix7#Ix(FK=n#Mo&b<#PtpE z%_>*Ay1K2Wb@6G0+waciI#V<(wAfnbJ}MG6i3;GC*fzA(3MX76eoQDRC~#evUQRTc zy2ucKA;{>cqO;X+-h1*ZT=02L zT<_VsJ|TY|9l;Da4YA41DgF?j-$_14)OV#zW_rrJ#o#*UUu~iPrzGQeb|mz2{b3+~ ztG1p>!NL(Z1<8Lc9vu7Nd)CQFk>qVYwC36Kb@t)$mu{eRo744t3=)t>IMxXS0VWmIGEy>>!*bDx)w$>WmVTp%u|&P22SO^4Q z*uI~xUq*%Xgv-{l+AF39yWq8X{-}a+HElLh+}3byHmW-^gP~=I;YxIsvWM>wIb4rA 
zSz_Ra%wskySo~>*&0-z+um9`N(PfByUvig#=bu&a7M!d9ySKjs*&Sd->_9UAtAqbf z-E}*hluLQZBjNCXRkG!}85-p9vnN=fBFLNML)eq0tc=-lCV17!z-UIA~` zec-?RftJWFt5p{O`b6(?V3gEp%)laFsk+KH^t~cVm-G)lzW%aI!Jj1r!tHU}9Wc+W zJWN-y*@3X$GvvXexa}OvOOg%_FEZKwX|+GxQu++Ot8tQm1Gjd#B!j#8fW`+5t|l5W z)Au=>_OtHKF1)xF3@mPZI~Hlf3-!+VWIgjJNJNb*#t3oM_CB4sj!`WUxqki@j1TB>Jn4X)%4uzL_=yL|CK7wI#;=S+j*h4S5XnC zcjE#RbH9Snd5kip|L}1WegOzzrAn~B>G7HT(mo9jFI-D%C|1Sg-VIioD~pVI-pik} zgcBKOav{7fu6-vebOi^3jlBsP1_5vPC$Dm*wKI|q-$3@RH>o3(mAjxODR9}yA3_g& zx{lYsfJKCbKR%KUvXn>rh zhcCbT2`#8?`}los)q(4%VCB#svuufH!lIc1<+V?X#n7rxNs;h#s~Glq-=2B+9v-{F zYWR?p#48ex)`WuvY#Z&;>aC-GCusS&o`&9%;4oMS_vRF)G6DO3OJy4lyxVHkeG2## zsq&hvln8&-$*U*_GJ=|h2;X^B?na(bn2EBtTO}Bm4(XJu%R_Pp7O?)#}k*pqv|_X66Y_rAU&s*lZkIGw)-;)|AAL# zZkh6w2D-^ZBJBdXMNIsXa;5NFxAse7Tu<}ne36cs9C&=B8`5N@C&Ry++@)*i&G8I= zD)fHo!csm8+)M34WapYY4X!IiA*BUw?%6hh*E9c_agP2kj5SzolQXRTUnupUYfBo= zTrK8X@hFubiB7GEo13ffWYvdiPA)g4;7eL`9D~}87VV@nJ{`$_$9(qYDt@4of~y{#K3ZmPx;1H8A@{%SX5i4yFB5_ARtHQ~x&kl${DUY0Wf0PP*5 zB_pj)$-w(-Om21nAg4?kt;eF~L|yFHKaryQO|MRVRfE-l}x3&C!wY zDu9>|4#(n_o$#4;@70#6O~2K0#vkgO0*N{tyPnuXV~Djj>f>R6n&=j zorh>QWc}?Y{E}?MJnORV~cVDoy-dxZ4^)|bm$Vt{J$Vd3Vc-*D>N8xc;UW=u| zCK-VlDAVe3=LulcjF6D#Cl}im*MN{|m@GQMmxC~p7qSq)kDwheEKE*$KKncWr0mZH^8l0lCZX3V;55IK2b2Bo%$(6K|y9i2A5)TyrZa_T?7fPI`eW84DPK^Kl z2kz_#yRu#jm~vNO7tZ6DGi%u07RuUGzjgR<>~W9s-2>0iz+4p(KG~^tE7HmH&Y6X z+uSP1JjJW0^y|wxH%b0`x^rI17{9!AzGW!2e$sN}o%Jc&*iA^!=hV`bP0h(DD5ftg zh@*!?!hz}Zu$|VcSQk#`^yj^g8*_E0zbyNhQsl@>t(Iso>sSpQ+1kcy21b<+4lq)A zW=E5+-3ugAOMOB-q*Q~mRhw_UX68Smn-5U;NKV=jV)sYMiq^StW`?nkGU%kf0KVM0ChTh+`tZ>oH+rtg zn(q*_Aa;$@Xvph&bvU1$DpGucP+JjI880gx^SXelOQP5x;y=U{bwVq;!G z-G$iW;H$sjfiMieiL$PIsYl{_=S)}t`;tRPJj>SpIf@+DtL5pZKQ<1MkuMYK%)2_X zuEM{q70{J4i(H({TXE{38ZMohZhP|&PAJ!fBloY3hm%demIP&2rgCK6wl9wxH49~0 zbF#E}nMpuh5{y(?6vj42g|ldhP3sBerHlOaDgn{N*G8llU091kDj#*uOyWoG7>ibO z#)k*fJU9(Ze#T~HA4M7w1hQo8#`?kZvs~{XYj5C^n@PMCxC%#cKp$r^oHv&FV#{^4 ztG6W1`o2egXSL+|!K~kNXFAP=^@Bd+@sh;Yzoj=6p8^cOf=b552K??uJ|lfBBc0_i zGmUKe?4veq<9_^=&15TLR@ihY^V1inC*17x_#OuhEHfq@uB^O(Q>cRI(9qHs{YImC zlHv^BsQc(8(CN;p@C`W$M}@*;aK~M$>#VBPmW+DBiBvka2J*Kz7NykzXu(D_+}g@_ zIz!gG{K>wqGXGPqrM6CI{S8@GRH7Ee`4F^Nb&~PZ;9+~=)HM=B$B~yD2HXx$2kDx; zsA*O@y_hQ?%DXOz_)~#9&k#**DJ>ZpO|~noA{uSW?UIYiLKaap z{(^90!V@e(ghnvQ=ChTzt}$?XuCnH?=XKf8x3&o))%0$S98pxAayyvsk`H|Ly0dMn z@ZHc8&4rmMp~7TjJ(^i*&01HHux zsj?vi1dGbe`j=6y(y-m@(2U zowvzKy{j=D-x?6%1{=3pTUe-JHEo5m5LeYo9*mpu>UC`UZ{pBI+nJwp{ z50M@KAiy|S4tSH;o>?gOB7Nuwn}*k15=m1$eIG;1ux6y9y@Nh@H(rFbObhHHx>To* zW`p7f@VN4YiK_;#XmzA-QecM5HWH^8t9x7@%oG<^Mcem~K)~kRHYs&&^=1=^Kc^pg zYHl%)C0Ld@Y>i?Ty=r3hGMEL;#%#yF&SQKNGas_CfFp8N+r*nq@@0hO!_@*j(OwKU zySz2kG$(7PiXT!<(R#!I7cgOml2RLq`ch!q)m4f_`K&-oC2c+O{m^j_M&fb3nGY8f zyS^>#v|5S9;8?2Zk2~*alvtcw#C*6okb&RXugA-6+~IN0I0G@i*5oE|jbEgv())Iz?`+a&k% zn7R7?O)u3{rJ$|?hDs68y9~5n)v##M3Ba_sy3Fh>rQ9FnL1<@Y6e4bOj5lh#vzGJi`5-f{rZzO7u6kp@VI)vH%C?&PwV)h#W%F>09dgmYQju$ON3zPxK`X((OV_=)xRU3|n( z0IYA!H2|^ind{k^eFtcA_wf&9n4GsusY+V;4eI7}%F+-V+=|Y|4hoMSh`6()k8asJ zz8?qG5Lc<)OJ`If-1T>-%~m;=>0jg5(vJk%F9z`d75G>1ABM9vNYFe+^lB%H1F@vn zM%EWDulZ`tkE2vdlJ%GIZoEAAp@dE9m(D!yW2t^;!EyW!t?aIMqy6eZ0k#evan24` z0d-69oiGoS41qRF5HOLSGO0_&t% zvfJ}(C0NL`beIEd54Yog7@0oN^V3}x3yU-8Tq-^vNHD%|#wE7VLZ;BzfgP>nF$<+# z?q)`+3pck3-Naqd)z#5&iPDQ{>mGb3#!M*=9r28O=i2rNX6?(B53Xa0sQXxDKVSLn z!XpJF=;+X}egE7t-%m_9!|TUt;g~}kczfXTQhfSe5*ErwoRJ9+#cMA90%1QPyoGrU{w@#&$#{u26If$jqrm{??=8i%2TM;}3&r{$w zb;CuyUc$>ILRKT)k5C?5(XXFdrO5m%@GI#fQWJ$0Xmz+rxgM+^c>ksau+uCNP~g}Z zVNSa|RLfxF=TRuKVhJFa9b+tAUf>X(Bj>(~%l6jJn)=FvxJiq7--ubj5?aLR4XeBQ z@<^`_GQS)wOz~THFY_u$PQnr-hFg--8Sl=A3if!HgeBk!R*b6aaW5Or@ixfygkPm( 
z@`IZ{v24d&VVS4}$M<YcsF zs7{qa);P09sw|PxY<2eIxz3}s&kyxMN1Njci#Ud~CPB~FI)`Jm7KKs==}XU9=s29c zz*jR9(s4h?xXw17iXA+(fKVXQb}e!6tzUz(X}*V83N*K!yV-~xk|k9#<10^_boYu~ zUhG^Hw>~TjwW`^0VF1@dEfdJd%O9Ilp#E*x(fMYbowR4COQ85;UOU$z_O3^c94jHS zDG#qw63EZySWsnZhxw3Y-oo3{rLt4$Z}Sf4wm#|bEES7W%?JpS8s@a+OB?6?>1?k9 zT&OAzJq5?L(c$m^)U&DYNhq4gOlA=h(nmIWH9jk$eSHr`IG(bxWkLJ)O$u?YPd?@` z{#%rsm(->XsKKZb-eYeR+#hzf8g)23@Bm(?&~(xfn9?B~3in9kU0%lbk2O5=cZ{N@ z2&oU~!6L_hwB!{<7BlwA7V+>_FI+DbfN;6UN~;9~R@O=d{nCjuscG-ptvZcpW+Pw} zeiZi_pbrgJ<>06tedp~B^41cY9Vs4MCf3v^KP;}?!n$0VLVOna5|X6^}O?nf9Q<}?0d#{g>g8f zaeDrkaB<=QcrzQ(p!3k!C%|SY(~A>_v#lnZA;}*3|;+dx}mk zkq+^kD5T3 z^lchHeRQ`xm5jN0espwlbUrdPo272^jC;C@;=$f$nY(w>uGB2%=S#@oaq9NCc3rvR zBnQbAP}Q(xGSDG9kZC|vfPMbCtCxsLXtSE2`&Zl2yF2{Na;_9kJIjq5FRgfLiarGg z?d4qaoLlt)58HX?90Si~x=*pWdMWNp97W4-a+|4LBLef&tEr~B65sBw7i)Fdf|M+% z+SUOllqPHhpD1_E`+M4Gx(oK(-X(ep7HNh?#&)K`Cqz`*Umk&nygp!{s4gwl87U-V zRhd{)3GWVi7#u733EUCPsbZ;lJz~UA((!OL`kwciZ9Y^I&!i9fLK*LCFG{X6Or#T*q`?Eo0}J zHk7fknRDFVV)n0O5k5Pzy}@8pzCd-Vmaekma5gksTGm>R7vJ_RsbP+xoL;IuetTB? zB|TC}ZHLE3_rgXZya)fdr6ok*cQ#{(XEU6`F08BM-11Y$lk~LHB62 z9zDIn_u?Y=kl0VoZYn?Zc>nl9XK8(4CbT>c&tEFiUtp~A$5$gha&o${o ze$%bd!g%sDpf>!eSOdJ*eR5(zH4tq)DpOVYTq{I`b>OD_O`)QJ!4#g&h%7C!9h7_{+L`B#{&u3M7{m`f*2`MTp$mEr8$ zZd{Q9^fi^8onI;pb$50?Lqud({t5ck7egl~C>XkUczwDqA|ld#HRs4@*U^X5Vr(l8 zHxJ}P(snP`j*mhD0G{ic>oz)fb>^F=8^5^m26m)-T}uUj^OYPxwDh`>z$^KThj;i^djZw5)%KpE1LM2j9w} z0cbw>qrj^3jywmGP>`EBjd=oC5tban0(kD3?t6>T zdYr1?Lu>2eJ6sxhjT@Sd7g)0yo{bP)uJAJ)zZBLZq@1+2AO7>}QUv@sv6&KX&e}!A zPM>TZe*$+8=(_(17zvL4VHeQWT@4G_M})tk(SC%l47Y{PdpzNMPgR9IEYm;?gkRR8 z^S(MOG=i4lk)*J#)?;-PZ;J7qC8;gURn9C z5j>5Lfw}DU3=Ub-HJ%RYv*iyeRNS08(|HH2Ne*Sb_HCxxHab#ooOF6>cU!c{`Aqwt z9>=yAFelD(RW7y`E|*s+9v#BRqAo;xKC-OF1oOW+O>niJ&$h2loF;4ZG_|Y6s;bRs zV)7e#m4po)}`AfAaS-;p?_=%;zH)+JXOK@o@(n)NpyxY{;mZ|<#_*=Nv>F6Po` zRV;#d``|$4KcP7$qlT8hp}xTkQ4h5G%Li}uqVYzZ+QsonGCg2Wa_7+;X&TfMDfNs0 zQfKYJj2FxVK6zhlk^7lpSVDq^rsnK?txX^ne0-a}zJ3?q;AEb*fA@1iYgT?PYpssE zo{Va}rzKnEd+gNRbVRNN7J+=j{na^1Cd$ReUn4?!dgU=9DjuV$DPp60;sUM%BxGWk z4i1^)a-2{QS1&cu@ywWWo1xNPot>SI#_-s(yDJ3`^v9@ZXtGjLjgJv@%ma=#>$z7T z7yCKs7T{`Ppk4iUOwL!%Uc@$Y)Q+7zP=0<%OqcI&&xxgmt19EsPd(Prf2m)*l}=1b zl9slXPNp2p>sgOvLN$8xuK!QCbmhJ%oJYUwag-}b#4q>o&0Oj>!5|I3kH`>?JJt%} zOW)#sk={VCdATJOQ>WST;IvUCPQ{pZ<9OavP!$-Nwil#|gRcM?oi4mkjEtE6&?`?- zp{gqHpy@(Mb-W+%Z}B`dbpk0Oe+wj4T~im2UT?q5j&tt4O)%f9z*p^b*ta--J~Lgh zT;O6mI-0=G0{}E9UK69Q5K2DU?9a%s3Z!C1l_4y+p7HX!oSD0gOgRL{awROMayY8M zB4}Aq7X)&RS604}Y6MHvSb>T)_Vp44_0@he{wSP z1ksnEOArj|Bc|r0x+$$bk$uFK40BhpkwR9wjC%~VY`eU;ajEloY_2sa7^&0tR7k7$ z?h7lVZfR`M7&>x0V!ep|_sO%;mJiw_Ui=Np!}3)#J*RFGe8Xu?ctDLxF{$>ECE3g9 z{0r^`?S>}!`GrGdx`$C>t`+xoFqgcFxli?5uqHN*bvLwMGQ!AVZ{Z}R3db|Ri`c|O0#eens3;>- zau*jD0|NtM;*{;BcQo8pIzK;5EpiYe&B{D3*{mk^mJdN@S{mKf70IV2hy&=D@E%PH z2!AKVJB5(&aJV3A+LH6*rIe)PJNE-kNlD38X03(b8&{pA!j1cMsi7;32lk^PT@l6* z<#Qo;+8hoYcT40i4Y%rh!Fu?a=15%rqqeuVJNRf9mqJGf>QzCw>7 zVPRBzv@WuOJXrTT7C@{#3PN$rs}HQjNlM`p34Lp;S?`#GR|z zSH+UeN72d=`^@x(PYA!c-IK^bgOZ0;bUDJWzxu2%K!uuc8tpMdItcI+8s}$e)W&^* zz}PsuU|nPXgt_N~{nA?t$^F-?5;|o%??c&2vR$@q&i~@VR@=|NAJy02+1*3tudXTS zHkXGz^vjMmZ3`-0&QNx_&7gUupgJ@Ay<T*4rrRN1&b24SBH7ab?z}X-}Ic5 z7<_GMOflda;%nIbt)j=(hG=6wduo;!#+mM=sd0e8a5~UY<6eemz#~8qKWe>PepyFU z7V2=HK2fhfg}c=L6sLZ+=iemikClEeA}~~MS0iBcMfi$iS~fQOD_Uh**9V6hRnkhs zCUz5$`6frp&^=s|8VoWuQ6v`r0@mjCZm=yJpX7Z0ey>+scWLkE#y2P!YgTg{!Fbe% z0PqZHc!IQ&#AZrM*V&V@_$aEJq!U-Kh)T15+#*aWX)EAGr7|&?im1`l`1`O^($e^iH7p;wDUw5!i^Y)T9Z9a$@_TF{qFr&V2=kcDbq01nxHeYY_0O})#FF1-<=)1d-!;tG-Cd72`)Rd z8RDNB4A_|~T)g0WDKn#gz{?FX>k?GN6Gc`FRHs@dhy^YccP4=S86wR6gE8IX0zF%D 
z?hiA+5%Xk_YH%s)Ng}0%@(Tsx!Xp108Zn^HK9NA&^MeR#V~}%As^J-*tPHo>B@ZRv zI(atDxydNY>)%3U{)ujzD~(9k(DdKOpEqm!8<%IN$p|&RXwhFbPu0^th(Q9J;5ca@ z|KtGrR@!lybZjAUvv{>gKM+$Zdl`T%oYwo@e2=(FGAiPnVOw@$Rt&0Wh_xDAk5$sj z#>P*c6Bu6|-^*Or+3%N`n!`VP2R6Z;3}bZi7PxUu(m=yq{x|&3k($2QBvkCnjJ6x! zNxooGC$K-fzbIACbE&s(OgOcUC>IoyGrl?qANI;t`6*!d{?U7n7`arG4QpAisLkE177 zRa}!mN6YNsJ(Lm}Qi(w%I4IgbJj^166iVSx>BX0bm0=i}MpARuasPKjq}-e#N^zmZ z&#V2$;)s?YKxMa|t+S^~c)l*1=s;aFA@8hAc=3nktJC(ybT^Ae&K(}F%O=6Qp)g#t z#|=760K6bt5tfS%z`_)Uh(-tW!acg->>dpb7meJ@tv~}TqVf+Iha51U!>D5xyFHT< z`8mOo$L4qNHc1s6A$fX!W=g29jF)>V;lt5Z-O+Joy8nVZB2Iq~L1wl+8SRc`Onv4kpaRYscM> z6LpkCwxt8gjSNe|v$LCr)jC2c0#*+{SonfX$Zv5<3hDUam1ka$@mm1v!G6p?5LDaK zI5R0_SFo-4Mk)-d8ZT3T5(BHXB2cc^&(S_IzhZrD_7^KthBT0GS^%cN?dqD~so}B~ z#M#K<@+w^(C%d-AnQs^HKgp#!{5t|ea_D!ryWjKW>p?{Ko%-na;s$ZgNV@O7?V)?i z4XL`;&ES+?56%(J(Qm|s^zWkE$eZF+w2OOas4cg3GVCmwDoOt6%VXB; z(&l}%IXTy6&_AdrWv)?B9>KmHqx1^xusLrQ74Ogj9&{?#_fEcYA1Yi}>n+Xwiu76k zA{A5Au`dYDE}29U%1PMm4|F?OeJzVpv42hNk)hhUs&ZSp*O&<`Xb?%elVd&p8n&pC ze?yleHh@g=;*oxXxNtWu01MpV0z+#hRqxRvuU=2dYa`ejCRqO2Y`Qt<*#h@Ecu)HF zI_*c{6DWWE|0(UQ!=l{w_hH;_r3932L=dFAOF_CphHj)`=%G;okq!xIk&@1#Lqs}; z?(Q03=ow4E8Fbq$vHEY(oKlgoq)~cClme+?Ih9xI3>nLIZbCFfx zv6`Qw#t#ck5q~BtMa~kqzNd`E=*YMvNSi=sgoZ}%(oh)nws)eOm%pp^5Ep-kz^PJ} zkzwa!J96ras=TSV0Ty=qQnU1xk>Xur$o&1D9Ij^FtR#NF<#!)e1+3+Zl9e;#Ta?Nl zes4h3;CQAd*9kbSf4+k!s_$vDR51*REQ$qQ-OWa$He2szElXaP){|vgCLD1+Nf^F=nH1YVzU}uOb{{d6~mb{TwPzZ~N=;|L&n#C|THO0fl{mu_;*ys1Cy7?B~Hhli1e+NbQX^dU^ zG31gR8|6)I(B%JC;Qzk@-USa;UFw+D`B@!<&YtzZ!dQ*#9S1P-+g2-BnZN1v*KOw-@L@{9CD9i{TeIpn!TGfoyx64) zXYf0>AXnIQaB#4trG+%Sy;Bq&9laxh0uvp*t6CCbBxe~3gFSssLJQT_4zEyF87`LS z2;g($^717b5FoMI+??DLqz;I2u-t5TrXN(>y7g(zek_+hcQfvZyvM|&3|+&qp_y21w>{_6D23=Hlrg_IYdRs~>fy1{`raw?Oj&+O?$)`>^Hq_h zTqcPC9Tv=t=6{e;pdqybH(I7mw_Ze%jd??r)uop%Z#X)$`o@dw;^L9=FEp2~zBV=~ z5~(L59cR-8F(p^$+YySWs1a7$Z=FeQxw&|R*;urWM>{ho_Onil`O*LnX93kLzDDtI7y`}nRAsC1yUMpr$!v02v%vk z?^b9iXUfZ}UMMh0k{h~={w1ZOT6f6}Sf7oN%{-*l--G0*5`EDORi+P+5vwdUO#U~7 z>~n>H7aDeesvJp58IG?LE}DtBtDG_CW752$eJ}hytNI(qXG8yByy~b=OaE1hCKCUU zvQgqWmcNq4QuyWFS0-+>qAa<3hefF=7}$NfCde=S#7KQ=?QL<=XNVt+!O>NK&kaU( zWc{Yze{8b)+Gfw5KV68sjRcE2HmeA;S@>yS*v(mZ&}|gMtD4Z{4a6)|ShhQR`A{h2 zuwdADH-mXuQiSqz@xRk~<)8;BDvY;4f;G01j1QN<^-WyhAZl4$So7(%%j2dNGCZl8 zWb-ykox{~2q-P?0N~Fm;O_Ql2>3Cbq`>>;|qAmie7!Y&NG1%;4)z$IEd1~wx=IR_( z2W#i(o)2Mge4i)tJm0`UzcO2H%p(IWyeXpK(HR_u@Tj%ql<)w*wvCKtrh*uZjd zxIebeX7TUdvES7qvCmf4Hoz$&Fvf8RjOnOKk;r_y>+ez z8Ozon>GI!30JReLZ=E~t#>AW-@s^e~v>6~K5q4=}ujTXggIL3h0@(2#Q~oXss_yV0t$t#LY&Da^=8PVuk>mI0UZ7bic|C++EPu@iLThBbbP zAQJz|=t_EXqN1t&;Vs-2JeCqw@&%PbR=L6XGCm~^-|KomC(@!gbi8tFwEG34X0t~l zXR_a<2O~YV49k&bBzZ7Tk%m}}LRbFB{=e6#t9{F)BxGxSk!hs|g_ed!qyq^#-v5e` z2{hQi;s1+T44KD3dEVtvfccF}o~c^%=m2qabb$51ia>Pzv&Xg-(>i1*ena2>b?$-@ zpCkO>SX!*)rJL%vyhGH9uUT%<6pr@l!s zs=B_{1t4?5tIe4r9eY+4FBdq8HEz8H#6<}qwR~OgB&*hP9AE7HA2^!iZ-G2Q2-wFK z=mU-hhPJwW6EDkSGye1fl%8MijlI*=85@QMryZPp>?)tq%uNw|Y<^6#bpR_Lnd$#Z2>2`HK%8)C9A8A97Yl_IT~5 zu9hR_BT}2mkv$fRJcYLg9qzu?r1-{CtK}~8Xh(=HaNgX9V}Im0Ea2g z$;A~HI}*z+`H6^!K(#aF?NqaCCu+-d3XJpRU}=CHou?ofKUi+XZb?!BfVZVl-YNos!lI4 zR@-T6YYc6fbqq-P4kd=$D6ciO#a~Zdj9$7#V3A%Bp8CdQ*@cwt(`76qI*K3zt950v}A4%C}8ImU)Glz_lx&cQbj@Uyek{>I8i>Y z0PtsD__t8N){v#?M|!;IwO!A=w`)1~S=wUVbv|OtgmjMMc|pJMv6ou#IC|nMob45; z>~;$hcU^!ILaYCHErPlkm=vWpX6 zQSpD>T|%UnsE)f=4FqYkRTDEtdpI?h9pnO1^wBqq)5wsN9V;%5zrdRB)g$G9+Hmr> z%yG&*kUjyJ5QdHI?bD*7jIR9Q07&fZ3x!}Y*1P#x?5``X!?axrm&2yw6)k+LWq2H~ z+wVQMy%CtBR9FCc;!g+;BZC34;YTXDq^Xy1FUjqd^++w65 z>T6*4YzeW#b7G!-cflv)XAM*B+!c{_yLPy_=q_jmeozp&H{?Ohoj3|3KhL zmOk*pP%(X^O`mmbS>&|m8P&yZWW6xQho?FM6 
z^Tm3U$tkAR%La|Bl>}T<+VbM~Iat4~e0m93X?`m6Uj!q!kntjEo-B+`FPr5%w4w93 zjg5_4#xuHoe0&_(v?-g?rS%wGH=3+^{gK8Hy1ofe&y@AOHBPG`!Lluw?_32_gMxw( zLqK;^VFwu8sHlhS87gw!!Bo|HLp|eFKAh{8&wuofnE}{4O%O20#)gQRufMCdK;~5( zgE{QmW~fCQQhxgh`bW+mN7G`F|lN~cNGrt;Sl8455y7SE)i}&zz554SyX#M2o5-;cN_6*T{ ziy04z-opYWvDLXjwz;&U%_=-Tb&V?MoKF2~JsyLCJGZsyq!APp0&6 zc4wicrarIQz*azqBhG8HnEkVOnF<_F(5jpZU_)o1DrZg^+ zDiie=Tu)efoHwTNB|k8x!N+7TIJrnDEb%xZSGK&RL%(ej?RK*IM#IU;8D>F19Uzs- zV0O_K<;2I^R=B%AS)!r(nr^nqWhLkstey~{c7Kyrce7JB)Qap+9l+E1#2(_<>__h>(vrfL4|KQ1cvSLa!C3d z@_sn+O$(n8-;H5h07ZqUP!y83L<>eIcD3!gnw>S@n9Nx3UYF?b+Z~j&AR9jW#?C&P zTiDrs#)s}o2>AkQV*+@_|0X!^BvCfHsU_6SDd~8fzA=}GmtJ2n=;&a>8J>dbc+Ucx zCjX~jG%O+SKU8WZwt$C;$rnOe+CZ5cOLwK)8@`Gh`9;oWj5g@Q?Ra#E<}XBY!Hr4v za?Hs64&?L*=GTovDAaYoaE#geQJ2ct-b z_`&_?MnCVk0}xQgiv6q22__~}$m-W|JbAv&zS!Q;y@5jbQeZ_4$bs*^c3Y?*r?To% z%h}&_B7$e?jQgX;UKm7F=~TZVGzCen?|yQ0m-5FjC=wM-ZJk6%VS}!WjQDrl^xn*4 z4ha+b-}x(MrwzcZcGXLsD>%-O$YI`Bn!Fr+7p#xh0}tJTGh+UYyL6q21-XL51ex=G zy>XTiSM&gVa&DKA<~8G~o3iD!oos_&!hTmPMeLcpAYT!4z{9_~Kn*5l$$ruEAHe(V z0grZbw9A6Vyw8oB4b1D^B1`f?oc(*}i03Aa#>>*LN;~U3EN90nJSl}wSNosp>xGcw zENc|_T=u3;^n!3>^1QHIFZ#Vx6|X$Jrzs6{$96=VtTl#vrk{x6fPF+w$GZ)7)!OI1 z`~;&W+;>(ucDGiz$VBxyt2!9R2PhlcUCZUg$}WaWq_JGSmP1T=>U|0y(!#s- zOYq8E;B9H9jdAMHhrSJ(K0|5YH|t==fQNT|J__GAWGi+5MRl)u-sl+mnY$Uk4*4q; z-A6@UC`KO;^;3DbGzcsHDx8ExDLSD+xO5zGQteM?KjZDDH~M@OsmKx(NxH>|$>Hu` zG0)p)u88DsnBA$9#J6m)p5*;385B{gufBBX%Ii5-2`;$^o2>PaQ*@^O@h!r@_;@bu zyz9EO_BimN`0AY5!opj?%pf>rh)zs0?9+ZFJ31;(oRpl7=?m#7{)8B9&{!4nyU@+% z(-3|8nqU3S!s}Pi^6J;M<-o|r=Jmx!p%r0fE(~)WP;SJX(tyo6|J%6*fRD)zUv%ahmsgfX1&r&$5^H zlZ`Li>vnDYT1+VJc5(<+^R4xme%zoT&bY415;csEVjwpLiU%ABodkxU_|^QgS5Jyj z&gN;StCB882cEn3*Oo{t&+LvY7qz(B0+^bbLv--7kO7#Z>ytU}oRyl;jyQ@tA6|FBUiN5&orLDXm! z?{Fvse0#`&<+LoIS*lf(RmIaXEE>chX1jHD8_~cHy+XjivPPS1u5JMtPSIk&E@*ZJ zy6}IP2qH4Vud=x2eHz7T%@Ri1yIn9mvcEWawkF|uh*3*b{b?i+4#XCDd8vt?aIWjaFI8b$?og!1r=uHuJ!QJZ$1CC9RP+=c&4SyYtz_^&ahg z@`Ob>$0h#`h`9_(RP!SR2Px8~E%xE1g8dhtDCU`wZ7PuV{riB6=DvuaP|2m+E2823 zh&wv2YkBh@oz%n4PO~%$yjydRv`3v^oGLl`3!Wz%%snEh%~d#5mI{i0^rWc9cy%r( zI^!;H;_@1LvbzM)brUVqd(5ch+ZzpX}>(DbN+^XQQ7{;0R(U_aE{v(B5I>TzSJ}ls{_!0K|o;gjFN+c zKcF~0Y?Quls&hSt@0V0hqnS`^nNdUJ&?tPvT(0$2Lt7E4XZ&z=i{H5UT=u7 z!I<|^+^+!-J#fH3oqxa|zku$q+^}Ci|2;ztIOx9^GG=nk(cUBsBbGZzfWQYhVE;Qh z28Ia$fxm@Dwiy8@{gGq#U(Wq&(%P?w`P)bO|L<|UdGQBp4l5A{_UlxK)!hC0xbrzP zLUryk6(~4fy1Mk~@rTMQ!v2gVH1+4Bl*c|_4pKZTec$AIO^lb__XQFVgkM{~2e_uq z2=4*Ug@j!2to%4Sv=-`b5m(1@89_14@=cC$X-L%L^2Zz=pDu)$Ynwv(qnUqX{T9gz zs-#D8W`?q|(lApB5SZNhD<3`x)`ZQokB}ZaviWI9E%h}=UX^+-M%i83@18l@YN(r{ zn*R&rZAJUpplMinwyWkMb>n=G#6M(!T?!^@5LW~?%8jZ#ot@huF}|nv`8!LS=HJf% zPu7>)xFWY@WKg>R7l+9Bwc+f^i{|&&tU6xR0CFXw^4TX2ZZUc|dD)@K0KWBZzy;lH z;vYU)Z{I!lH^6d$3zYgKpNX0{I!I=hWP2U?se5Q7(R?nJ%q!uGfr2TR9(+#7&bx)#=6Gx;)x1;{J#IQUjo+chT+YS%OdZa78_8{Biwc*K^_k)C4SO zy21Im1VjL#wMMie0fVsPz2{a|+(}J7m#o_5v_e9c>ENWKq!ajb^#?!?xo$5nCP)xD zldCE|bX$pf&P5$?y)%@k%7<)+>l`Zaa8Y~mGC~@aehfj0ydsX{fm>8zSq^5H+J7IA zh9&=93Q=_vuzL|>B`Ko)GpbN}qP|vFpja80^y&h&<@9Ef=A|H&AY{Fy%+xzIgqS2w zx}v2OwZ2=tVQ%o+ZQw_EGAyEi^jWH*psTh*_TY4NBa!X3azpTcGRqEreg-Ch0?f&L z|E_@>VA}-jgKBwBC8rvU8i1EDB{enF_k5r#z45$*%5UZXd9fP$#Pijamr@Yuqn|wJ zLD}Ugu$Gx_^)Uz_1i83ePoNUQaT|EK6&TNc{3D@9qo?sg^PfEIHH4p4wd?%ZegCO~ zBtPw^3>;;&U;*ZTxFiA>Pv(HlV>P~ej|O)ID!St~&YS&%0}M<|-Fmm8 zhfkQ9dwY5c_@!mdmF^r%?1@>kB|*W4+L{&fF|ev8?QyYxM|aVCjPpO)Qoy-zGb0rJ z7dPiwe{f)et?Go(v-O#?^VSbIAh>`ti)hbF(@Z6T-P3fp+x(m3-PRaMZA=CsAzd^H zZ@sI`BAzBcUtktW0i(RR@kmQc14eUYbt5AqIk|46Uu-@~*HCVGsI{}VUiqsz0Ii{u zBl@oTv2nNpZ1r1m-n^u?v9&X6ot<{PZ|vpgD=pjXOfNY!VRaz=TP^L=b-EO$Xo<^S zQA4kldimswiF|c8*p?XEuI~irpWLgwXL4?3tgAjrWbk#Q_$;eE`GxW6fJ5 
z=)y)}Gvvz206bat1q8ok4$rQRu;52kX_1ZXE6IIP6cS|TvK73=A_7moysv#I?)S@B zunQOqp1={Q;jvdTdQt!CVu%D>41u43=v;laFAK0=L{SJcX{}s&?ps?mT7Wq$?6(eJjST|rg{Eed9xya{oQWr(tXzhzQzU`PABEWwe&D- z7Mg>$xTX02(~c0Xu!iO7;%0CP|H_)PNO8Avt5?05o}iIi;cuJ)@E^c#c|RXpf^`qn zB`eTid6Y&j$;$vRH5j(Y6WvTHw<9h*@y>|<%E8%=#{Dh2!SpW8`t&rJkMi`zp0hfi z^e#EK(f3?=uwn|-vwxyw7@vzts3QbQ>Z#i!j`lZR0?CG(4?6>k!*;51bIBD>lb6YA z0Z^}FQR+<$dH@cUG(ge%q^1mTafZgL^t@oA(~^KGbA|J_!fU~9pH;q zCe{4bkoDW$ySM1XVIIifNC&mM*Fhw;e?p*zdy@Y<^#Yiqf$|HMEHqO;xh+cqn%x~Q zZy}cZdlx~9pZETT+0oj%EQ@WyL412W(QS4fP%z?ejN=;S6FE%VE9_DJw7>!r7*L*R z&7FU|JY<9!OFd-+1OT8keEut;sDcYnS-`uWEn(+*ahJEw0s!5;1DXXW|ANu}>LK`b zgNWa*%awO(_HWV`7|bOx9bQzZ%J#t3U>Te zG4~gbe%9vpR_?K)^wP%oa!m9hi(~MWlp9~^2cF1K57K9Kl|Q^T@B7l+KI6jbM-TJa zlu4!tdS_RV+-5GXZ+w8dy4^?Y zC%eZV2dLooUwP7w@qU&43f<97*h9a-TAdqK(*HI;>jP9vAxnrGuwPsYkx@Ss@E3?z_hWwX}254 z^R-#8O#w#Cjwt)PWVKy^@T(8Km_i_X5f6F>iIVctB9$W5&~{(aQ9S&w^=?MP#p-2_ z!3>*%ZCN{}a6Vc?!Tp}y<#lj2aW9nM?lzyDR$G78n@61j^}EPCAl1FxG?Ut&Oh=!& zG+J&d?(Vc3CGrv!GMAMS?(9%ME>5N2nN6E&KvjzfF-^WRYcpkrU*>%0cuKy_%Au)@ zROR?!2zEN<;#+S+0DUM<%nH`8xk>@gs}>2HEiWH#SViGCBBwJoi|277xd zbyOOE_*WFc-Id*vBXcgbmz^(_X1Y?0i>0FZOVsbNJ!#MljV@G;W{pNhGylvGY z>QYiQE!X>jb)kNKawxELy|FxNm2fAgDJY)wNfA;Bd9jT}Zbm%>yE*Gi`WRvXr?Z2X zn?&r)H!-%_T1IYxVmc#`+py-3B3s;m9C>rHD_j`Ng}8erR4XBl{ha`fI969u_~E?< zl>(8hVo=-ERFD01QIUOh5Zuj6%WGnLa@SsWde&aIM5hEg$0ymgO#jS8lJFXAhiq(+xookS|d(1`gz5@HY$&) zTh!C#(7c5zT@I01*C3u;#owI8!E7~SNz!58kOV1LM3P}bkTtupJkNZOvQVlr9)at|hq zc10MSV>Lp&Ylx`vbqRlLeMZ+8GZz%|l_B!_oM(9&X`{^h`vXc2W59K9W0m(|YlGs> z&!N;oXei`~+fL0(21#H6+-QVhJuJ0E2^}pfzk;iu7^zO!oBG)dkCQe`^K`Unr{aRT z3jhBCg*p)8!pz^{PQ&x<7kda<*8!=TybDqgaB1^ zwUDSsWhTSTCszhHNjv_zSWR8Zeacu%55=W^Toj(0pO$pU-JdP6zp+V(`CYoBy{c)e zFeb!PF*}edjaz_Ys0(c8)z=li9m(@j$?pj?{msLmrp)jC)jcERZuQ^CPvfdSBDX93 z?1_CUB-;BEqn`neHgx~1vEqp|*FyJ+%4;YE)%{q_=Pwm@ph+$R0m)NsspMj~LvfU! 
zD<@H%jzU)GsnUsOJTVGdR)v1^xVL*SkG}zo_ch38d!LOM^0-Dg0`i*G^dzpneDNWv zpS7y3mVcGLmjxW@}_l8WS$!6Q{F;1&uOM9RADM(@vMgxn7Cw zrta7PEcw(V(sh_kaZ#R{DP;djplI4je`58lew%;}Uvj2?7Ho6l@{aysi8HC4F32M2 z1*&3{TL-I~$CXP4>Zkk867^Ga3XykyPMbe(R*_HurgNB4JSQSO>^6MBol zS_<27IL7>kK6CZ3Q4?1S;l;J00+wSsvZqJ5<-&pwgssGh+{(7~hk-uxyZD_p?oZPr zC+40HezGA8@$H0r11eNbU%Hfj# zeupZ_PeXl-(uxq5Mbmtg2p`*CXlTY1(MhfzczhN^$SQwAy5ALFpQYkK=nAB#zglxO zg)!r4>&0bRBZ)G$KX#H4e2F#Z8Svd5=3e$3cgaIdVsbdrOQYj}W^T9K;#w3GPa4od zC*m&5cIvXVyZh|xV|18t-f+cz1@h6z>DBMdVy{sy73Qw>=UOki+?ngd!4`F<+x^m+ zzBFnl5)kjxF~=npDl0!SqS5 znFYMUhh^~hOFL>h?BA5dqvK^sm_59w{DCd3MULdnC)D!{)>8>~E$cZ>!yHP!^DX5s z-675o4&gFzt=dZ%-q9 z_R))%zAZNFgmEci7t1`tCo*y7DEn6QJ=XSzKbN8xCyab5&Gaex+k|R8f}90)zDC#e zfW=c$-f=I(_PoNz+OPpl5vTSQgui_aoQ=uo)fE2h2~94}j@y2AOKWQb9e)w6Y1vp@ zYQKnGB|pD-BB#^APS@bO?eq!;a`w&ZN0J4R@XnAlIww#~D_m!`uoUm=jyk21DO}Wv z)n*Sc^AgGWTuT4$<>f$tH?B!RqNeG)Wu%JNhbt3zx4lnRF*sI*at}>8X<;o%0I8ny zV3d+ST|bK5yP;q?S6<|Ku;~z`vL-oyZ1P9PesOfO?u-mj1LBTegK?OYX~)x$(9q?j zr2%ymNI*kFLuqNLQs|-&762^uiN$L&YierhL6QzMKvpGC_Fn-Em7ALz5GyEy(SBi6 z6k!!02Uka@n>`GWnORtVp?Z`z25)n$5*6qkUWu|5=jVqQd+xu@`2~r^gp%Lq<|k(!;}JS`#o7*Xr%EOmP3=I8ka|0jZy=s)Hv4*=m!+<8Pqf zesQBH3wn~0ZFaqkzgzso%qZXT$Y2G(-@A7Y6?MK_Z~JH`$IixzBvrsaB6wlNMK&7B^ZZAo6F}mt1DM z+;(*!_%=c2+a~5b=djyl+fdzWGB(y^vS};hB2dBjHVqBUQ{3Ro%xQxNW8%y~EftmL zXo$nZ`U1h0;i`FCJ&r^L((w0fCJgu?pFcmQjOuVT~g@6Mq!iYJBn1S&JOoaKklkW1;6<*?sPfP1{r z?NSm7(6cwRew5q;O`fSM?X!lSlobw%Gse|ZW7 zSxIN8{OITy=GIqJ`_BI0HgW}$`VAn?nY*7$8Yr+v6G|-YLz_qkG?H4oOQZi5s!xKCAf|@_L&o6N?{1#eCS_aS+TaV3M5mn)?_cL#|JF1}Z{Z z6zD#7nT13~f|ElY+;1H#(oLrCd-?KZR(5umS$Wkt10y3NAK$ncr!ghwZGCc=f7t7j zWviTLV;w50p!;}aWi%m{Uc;+)lALqmiyY|siR;+I$`cIr&zxTqNIXk(-`)8d^xMP_ zwdRBMD;A!Jx$&=rC*7Gh$g_`oRpi%2GcpbSnbKMPT4&>$lpC=6du8RPtN6qH_f&^{ z_jOXvr!Bg!gsR`BrRoD!w1(EfJqY|;O1;kyQc}o){=`k{?Bo>BW@yi@$LAB$XAXSU zDMr8Q9V;VKjWiCTf9WHrn@UIx!kE|Q@{JC= zCqpE`FVD?IUeVKh#YW(!uQz%*9Vr-;%zZNonfF3&GGtsJ$s#QGhDb&fR&xl*c(Cg% zk3;lbAx;v){PiZh$;o?n$4=UcbgOx}xdB&JR&FkyIMASw#qWW@oBiox&2}7t;;K42 z*1XA%ke}nR@{(M2IJ|L$nk`obi1|pi-OO!ZWWk0X_WTiQVr*TlI01Us2)W(k!*r7G z5yTYNdfbtc8}F7Ant{DTTnsbo8Z!%XMZL`sC+%fDkym@Gi*w^oLT1g^9rn8K2C-^E zO-gvIc6&pxbekqsqN^TnDESCBpdRy*hV%6ou`x5#1^`dlW5$`NFh;JEWM~U?JmPRE z?~U6Y=gwKamqVa>6`bjf)DE!XzPenOp9gk>cs61Ot2wInccpt@n%%^OdQc*U_2RrE zpixkVnO5OAaA~zeL9ww^waA9s_VTEQ>s5F1{B<@Du!R-B9yZ_@h1MG=g`h?4*Lv@L zuCXeyW2^*Ld*rb2;q>}m>X5C_xr~vOpD{c?|se3Qixd5%31HUZQaQ`t6@D7KF=;$FBo^5e+3iptk0J zlx=sTYiOv9mSJ_#PL2yNHt@4+8IjNFTS-16A?^HvHmIL}0WE%Aq_BFPzuYw> zYfvZyt=_22evQ^0gUX!LdzdYQ938?r-GVBBZC^1>cQulDDlHX~Sm(~SsXs}-leyM( zqV*|-VfN-YH(iudnbO%smbrjK%yipWf6H+0q9SSjRT}i72gd@2pTdtYCl6wWUJ~Xi zUFrzA?^f)wV4GR9PKzV875b{gvitQUmEDae$5oF>3|aj)?=+rnvM2`}RdEr;n~D`T zK1;hW*V$L;o2iLRDAx0`AJp|vUUy~>C&IaQ(h{>DOb)&D((aBZzC5iT#z{AHby2Jf zaccbPI{Q6~VY$JNKPK8Pl_O0Wt!&jimA$7Ul4L{z#&K)}b*oQ>?O7%%`S>8;W09dz z149`f>+0dTTRjDp;B8k*djKbS@6x)18ULxo=#luR@+~Q*44u3Fjb&P}hioA<`meuF)1@5|A&YaEnz0uNn^q;YSAs*VQ=LMS&{_{mc~Wcas*y)g7@RUijBn3#w;LC+W!$>T^aVeF2ZkrDgW z`nz8{h20~5-=+-nX+K&h#NdigL=Rg$$Y2GZ zZGLL=uv&}~6BUV2_uT(>uih*4+5DxsGA_a1gxi=BnmXjkHglWM=T z@uw6Z--ym;+KL!kakSf65hUIq@y;+ke?QmLG*bt4Am;O9uo@&=Yu&r;qQyBb~-pH}g%0veHENmCNQy+xz}` zkUqn_>C<8CB6bGXuZqAt0ELP&eEgDEknAfsv+?EB?1kK;%z`n28NHVbyYkJ_-d4*O z!)`+kW`^-B_T5enknskauD~#hh%!I#)bC}eL!s*P9i9s#D zoFrqkRXhTAsUfFv6UN89MzKWHKU7uJz}C)8rtQXkQ(l_|@*va<{OGL&yvCYYcj;Ao zs=34Wk@C3H5#ULK#g*OH5ldjoWav?;`x>gv33>Mna`})Zpx6WkHD;DyqcIhz9G$8v zFzxa-W46CazkwWSkp(rm59IRcW3+ETkd^X#>~MG9@AI(R!sf6eZ2@;_i%;k5?f3is zN`3fw7rs+`edx1uq_W70pGWJIscQ#>s*s8w{ua$GlBd)E>M>y`)_wo#>gwTaDxV|O 
diff --git a/contributing-docs/testing/dag_testing.rst b/contributing-docs/testing/dag_testing.rst index 7e311171ce019..0bf506c2f321a 100644 --- a/contributing-docs/testing/dag_testing.rst +++ b/contributing-docs/testing/dag_testing.rst @@ -20,31 +20,22 @@ DAG Testing =========== To ease and speed up the process of developing DAGs, you can use -py:class:`~airflow.executors.debug_executor.DebugExecutor`, which is a single process executor -for debugging purposes. Using this executor, you can run and debug DAGs from your IDE. +:py:meth:`~airflow.models.dag.DAG.test`, which will run a DAG in a single process. To set up the IDE: 1. Add ``main`` block at the end of your DAG file to make it runnable. -It will run a backfill job: .. code-block:: python if __name__ == "__main__": - dag.clear() - dag.run() + dag.test() -2. Set up ``AIRFLOW__CORE__EXECUTOR=DebugExecutor`` in the run configuration of your IDE. - Make sure to also set up all environment variables required by your DAG. - 3. Run and debug the DAG file. -Additionally, ``DebugExecutor`` can be used in a fail-fast mode that will make -all other running or scheduled tasks fail immediately. To enable this option, set -``AIRFLOW__DEBUG__FAIL_FAST=True`` or adjust ``fail_fast`` option in your ``airflow.cfg``. -Also, with the Airflow CLI command ``airflow dags test``, you can execute one complete run of a DAG: +You can also run the DAG in the same manner with the Airflow CLI command ``airflow dags test``: .. 
code-block:: bash diff --git a/dev/tests_common/test_utils/system_tests.py b/dev/tests_common/test_utils/system_tests.py index 578ee6cc04d43..6558ae2d1e4cf 100644 --- a/dev/tests_common/test_utils/system_tests.py +++ b/dev/tests_common/test_utils/system_tests.py @@ -30,7 +30,7 @@ logger = logging.getLogger(__name__) -def get_test_run(dag): +def get_test_run(dag, **test_kwargs): def callback(context: Context): ti = context["dag_run"].get_task_instances() if not ti: @@ -60,7 +60,10 @@ def test_run(): dag.on_success_callback = add_callback(dag.on_success_callback, callback) # If the env variable ``_AIRFLOW__SYSTEM_TEST_USE_EXECUTOR`` is set, then use an executor to run the # DAG - dag_run = dag.test(use_executor=os.environ.get("_AIRFLOW__SYSTEM_TEST_USE_EXECUTOR") == "1") + dag_run = dag.test( + use_executor=os.environ.get("_AIRFLOW__SYSTEM_TEST_USE_EXECUTOR") == "1", + **test_kwargs, + ) assert ( dag_run.state == DagRunState.SUCCESS ), "The system test failed, please look at the logs to find out the underlying failed task(s)" diff --git a/dev/tests_common/test_utils/system_tests_class.py b/dev/tests_common/test_utils/system_tests_class.py index 836782b8584c9..5abdca96bee06 100644 --- a/dev/tests_common/test_utils/system_tests_class.py +++ b/dev/tests_common/test_utils/system_tests_class.py @@ -28,7 +28,6 @@ from airflow.configuration import AIRFLOW_HOME, AirflowConfigParser, get_airflow_config from airflow.exceptions import AirflowException -from airflow.models.dagbag import DagBag from dev.tests_common.test_utils import AIRFLOW_MAIN_FOLDER from dev.tests_common.test_utils.logging_command_executor import get_executor @@ -131,31 +130,6 @@ def _print_all_log_files(): with open(filepath) as f: print(f.read()) - def run_dag(self, dag_id: str, dag_folder: str = DEFAULT_DAG_FOLDER) -> None: - """ - Runs example dag by its ID. - - :param dag_id: id of a DAG to be run - :param dag_folder: directory where to look for the specific DAG. Relative to AIRFLOW_HOME. - """ - self.log.info("Looking for DAG: %s in %s", dag_id, dag_folder) - dag_bag = DagBag(dag_folder=dag_folder, include_examples=False) - dag = dag_bag.get_dag(dag_id) - if dag is None: - raise AirflowException( - f"The Dag {dag_id} could not be found. It's either an import problem, wrong dag_id or DAG is " - "not in provided dag_folder.The content of " - f"the {dag_folder} folder is {os.listdir(dag_folder)}" - ) - - self.log.info("Attempting to run DAG: %s", dag_id) - dag.clear() - try: - dag.run(ignore_first_depends_on_past=True, verbose=True) - except Exception: - self._print_all_log_files() - raise - @staticmethod def create_dummy_file(filename, dir_path="/tmp"): os.makedirs(dir_path, exist_ok=True) diff --git a/docs/apache-airflow/core-concepts/debug.rst b/docs/apache-airflow/core-concepts/debug.rst index 9ab7819b8b307..d58c490854527 100644 --- a/docs/apache-airflow/core-concepts/debug.rst +++ b/docs/apache-airflow/core-concepts/debug.rst @@ -122,18 +122,9 @@ For more information on setting the configuration, see :doc:`../../howto/set-con 1. Add ``main`` block at the end of your DAG file to make it runnable. -It will run a backfill job: - .. code-block:: python if __name__ == "__main__": - from airflow.utils.state import State - - dag.clear() - dag.run() - - -2. Setup ``AIRFLOW__CORE__EXECUTOR=DebugExecutor`` in run configuration of your IDE. In - this step you should also setup all environment variables required by your DAG. + dag.test() -3. Run / debug the DAG file. +2. Run / debug the DAG file. 
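The IDE workflow above boils down to one call: ``dag.test()`` executes a complete DAG run in the current process, with no executor configuration required. As a quick illustration, here is a minimal sketch of a DAG file that can be run or debugged this way; the dag_id, schedule, and task in it are hypothetical placeholders, not taken from these patches.

.. code-block:: python

    from __future__ import annotations

    import pendulum

    from airflow.decorators import task
    from airflow.models.dag import DAG

    with DAG(
        dag_id="example_debuggable_dag",  # hypothetical dag_id, for illustration only
        start_date=pendulum.datetime(2024, 1, 1, tz="UTC"),
        schedule=None,
        catchup=False,
    ) as dag:

        @task
        def hello() -> None:
            # Trivial task so the file has something to execute and to break on.
            print("hello")

        hello()

    if __name__ == "__main__":
        # Runs one full DAG run in-process; IDE breakpoints inside task code are hit.
        dag.test()

Because everything runs in a single process, this replaces both the old ``dag.run()`` backfill entry point and the ``AIRFLOW__CORE__EXECUTOR=DebugExecutor`` run configuration removed in the docs above.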
diff --git a/providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py b/providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py index 33abc67b639d2..3c008ee5ca262 100644 --- a/providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py +++ b/providers/src/airflow/providers/google/marketing_platform/example_dags/example_display_video.py @@ -92,7 +92,7 @@ "example_display_video_misc", start_date=START_DATE, catchup=False, -) as dag2: +) as dag_example_display_video_misc: # [START howto_google_display_video_upload_multiple_entity_read_files_to_big_query] upload_erf_to_bq = GCSToBigQueryOperator( task_id="upload_erf_to_bq", @@ -125,7 +125,7 @@ "example_display_video_sdf", start_date=START_DATE, catchup=False, -) as dag3: +) as dag_example_display_video_sdf: # [START howto_google_display_video_create_sdf_download_task_operator] create_sdf_download_task = GoogleDisplayVideo360CreateSDFDownloadTaskOperator( task_id="create_sdf_download_task", body_request=CREATE_SDF_DOWNLOAD_TASK_BODY_REQUEST diff --git a/providers/tests/google/cloud/operators/test_dataprep_system.py b/providers/tests/google/cloud/operators/test_dataprep_system.py index 96f47fa3e365d..dad77ac4ff806 100644 --- a/providers/tests/google/cloud/operators/test_dataprep_system.py +++ b/providers/tests/google/cloud/operators/test_dataprep_system.py @@ -26,7 +26,8 @@ from airflow.utils.session import create_session from dev.tests_common.test_utils.db import clear_db_connections -from dev.tests_common.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest +from dev.tests_common.test_utils.gcp_system_helpers import GoogleSystemTest +from dev.tests_common.test_utils.system_tests import get_test_run TOKEN = os.environ.get("DATAPREP_TOKEN") EXTRA = {"token": TOKEN} @@ -52,4 +53,7 @@ def teardown_method(self): clear_db_connections() def test_run_example_dag(self): - self.run_dag(dag_id="example_dataprep", dag_folder=CLOUD_DAG_FOLDER) + from providers.tests.system.google.cloud.dataprep.example_dataprep import dag + + run = get_test_run(dag) + run() diff --git a/providers/tests/google/cloud/operators/test_datastore_system.py b/providers/tests/google/cloud/operators/test_datastore_system.py index a98215a5317a3..8807288358bae 100644 --- a/providers/tests/google/cloud/operators/test_datastore_system.py +++ b/providers/tests/google/cloud/operators/test_datastore_system.py @@ -44,8 +44,8 @@ def teardown_method(self): @provide_gcp_context(GCP_DATASTORE_KEY) def test_run_example_dag(self): - self.run_dag("example_gcp_datastore", CLOUD_DAG_FOLDER) + self.run_dag("example_gcp_datastore", CLOUD_DAG_FOLDER) # this dag does not exist? @provide_gcp_context(GCP_DATASTORE_KEY) def test_run_example_dag_operations(self): - self.run_dag("example_gcp_datastore_operations", CLOUD_DAG_FOLDER) + self.run_dag("example_gcp_datastore_operations", CLOUD_DAG_FOLDER) # this dag does not exist? 
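The system-test conversions above all follow the same pattern: instead of locating the example DAG in a DAG folder via the removed ``run_dag`` helper, the test imports the DAG object directly and executes it through ``get_test_run``. A sketch of that pattern, assuming a hypothetical example-DAG module ``example_foo``, is shown below; the ``execution_date`` keyword relies on the ``**test_kwargs`` forwarding added to ``get_test_run`` earlier in this series.

.. code-block:: python

    import pendulum

    from dev.tests_common.test_utils.system_tests import get_test_run


    def test_run_example_dag():
        # Hypothetical module path; real tests import modules such as
        # providers.tests.system.google.cloud.dataprep.example_dataprep.
        from providers.tests.system.example_foo import dag

        # Extra keyword arguments are forwarded to dag.test(); here the
        # logical date of the test run is pinned.
        run = get_test_run(dag, execution_date=pendulum.datetime(2021, 1, 1))
        run()  # raises AssertionError unless the DagRun finishes as SUCCESS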
diff --git a/providers/tests/google/cloud/transfers/test_facebook_ads_to_gcs_system.py b/providers/tests/google/cloud/transfers/test_facebook_ads_to_gcs_system.py index ba24a0c34da25..9cb0a9be5264e 100644 --- a/providers/tests/google/cloud/transfers/test_facebook_ads_to_gcs_system.py +++ b/providers/tests/google/cloud/transfers/test_facebook_ads_to_gcs_system.py @@ -25,13 +25,14 @@ from airflow.exceptions import AirflowException from airflow.models import Connection +from airflow.providers.google.cloud.example_dags import example_facebook_ads_to_gcs from airflow.utils.process_utils import patch_environ from dev.tests_common.test_utils.gcp_system_helpers import ( - CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context, ) +from dev.tests_common.test_utils.system_tests import get_test_run from providers.tests.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY CREDENTIALS_DIR = os.environ.get("CREDENTIALS_DIR", "/files/airflow-breeze-config/keys") @@ -71,4 +72,5 @@ class TestFacebookAdsToGcsExampleDagsSystem(GoogleSystemTest): @provide_gcp_context(GCP_BIGQUERY_KEY) @provide_facebook_connection(FACEBOOK_CREDENTIALS_PATH) def test_dag_example(self): - self.run_dag("example_facebook_ads_to_gcs", CLOUD_DAG_FOLDER) + run = get_test_run(example_facebook_ads_to_gcs.dag) + run() diff --git a/providers/tests/google/cloud/transfers/test_salesforce_to_gcs_system.py b/providers/tests/google/cloud/transfers/test_salesforce_to_gcs_system.py index afd0856fad244..d556f2d86e2f2 100644 --- a/providers/tests/google/cloud/transfers/test_salesforce_to_gcs_system.py +++ b/providers/tests/google/cloud/transfers/test_salesforce_to_gcs_system.py @@ -20,12 +20,14 @@ import pytest +from airflow.providers.google.cloud.example_dags import example_salesforce_to_gcs + from dev.tests_common.test_utils.gcp_system_helpers import ( - CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context, ) from dev.tests_common.test_utils.salesforce_system_helpers import provide_salesforce_connection +from dev.tests_common.test_utils.system_tests import get_test_run from providers.tests.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY CREDENTIALS_DIR = os.environ.get("CREDENTIALS_DIR", "/files/airflow-breeze-config/keys") @@ -42,4 +44,5 @@ class TestSalesforceIntoGCSExample(GoogleSystemTest): @provide_gcp_context(GCP_BIGQUERY_KEY) @provide_salesforce_connection(SALESFORCE_CREDENTIALS_PATH) def test_run_example_dag_salesforce_to_gcs_operator(self): - self.run_dag("example_salesforce_to_gcs", CLOUD_DAG_FOLDER) + run = get_test_run(example_salesforce_to_gcs.dag) + run() diff --git a/providers/tests/google/marketing_platform/operators/test_display_video_system.py b/providers/tests/google/marketing_platform/operators/test_display_video_system.py index 78f5d4ee021f9..49f44948abfdf 100644 --- a/providers/tests/google/marketing_platform/operators/test_display_video_system.py +++ b/providers/tests/google/marketing_platform/operators/test_display_video_system.py @@ -19,13 +19,18 @@ import pytest from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook -from airflow.providers.google.marketing_platform.example_dags.example_display_video import BUCKET +from airflow.providers.google.marketing_platform.example_dags.example_display_video import ( + BUCKET, + dag_example_display_video_misc, + dag_example_display_video_sdf, +) from dev.tests_common.test_utils.gcp_system_helpers import ( MARKETING_DAG_FOLDER, GoogleSystemTest, provide_gcp_context, ) +from dev.tests_common.test_utils.system_tests import get_test_run from 
providers.tests.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY, GMP_KEY # Requires the following scope: @@ -50,12 +55,14 @@ def teardown_method(self): @provide_gcp_context(GMP_KEY, scopes=SCOPES) def test_run_example_dag(self): - self.run_dag("example_display_video", MARKETING_DAG_FOLDER) + self.run_dag("example_display_video", MARKETING_DAG_FOLDER) # this dag does not exist? @provide_gcp_context(GMP_KEY, scopes=SCOPES) def test_run_example_dag_misc(self): - self.run_dag("example_display_video_misc", MARKETING_DAG_FOLDER) + run = get_test_run(dag_example_display_video_misc) + run() @provide_gcp_context(GMP_KEY, scopes=SCOPES) def test_run_example_dag_sdf(self): - self.run_dag("example_display_video_sdf", MARKETING_DAG_FOLDER) + run = get_test_run(dag_example_display_video_sdf) + run() diff --git a/providers/tests/system/google/cloud/dataprep/example_dataprep.py b/providers/tests/system/google/cloud/dataprep/example_dataprep.py index 9f603f43fb1b3..cdc736a41c66b 100644 --- a/providers/tests/system/google/cloud/dataprep/example_dataprep.py +++ b/providers/tests/system/google/cloud/dataprep/example_dataprep.py @@ -313,6 +313,7 @@ def delete_connection(connection_id: str) -> None: # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() + from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) diff --git a/tests/cli/commands/test_dag_command.py b/tests/cli/commands/test_dag_command.py index 00364794f1d53..f0c9a18c1a567 100644 --- a/tests/cli/commands/test_dag_command.py +++ b/tests/cli/commands/test_dag_command.py @@ -322,7 +322,7 @@ def test_cli_backfill_ignore_first_depends_on_past(self, mock_run): We just check we call dag.run() right. The behaviour of that kwarg is tested in test_jobs """ - dag_id = "test_dagrun_states_deadlock" + dag_id = "example_bash_operator" run_date = DEFAULT_DATE + timedelta(days=1) args = [ "dags", diff --git a/tests/core/test_example_dags_system.py b/tests/core/test_example_dags_system.py index c60b7325b125d..bd34d9bb15919 100644 --- a/tests/core/test_example_dags_system.py +++ b/tests/core/test_example_dags_system.py @@ -17,16 +17,128 @@ # under the License. 
from __future__ import annotations +from datetime import timedelta + +import pendulum import pytest +from sqlalchemy import select + +from airflow.models import DagRun +from airflow.operators.python import PythonOperator +from airflow.utils.module_loading import import_string +from airflow.utils.state import DagRunState +from airflow.utils.trigger_rule import TriggerRule +from dev.tests_common.test_utils.system_tests import get_test_run from dev.tests_common.test_utils.system_tests_class import SystemTest +def fail(): + raise ValueError + + +def get_dag_success(dag_maker): + with dag_maker( + dag_id="test_dagrun_states_success", + schedule=timedelta(days=1), + ) as dag: + dag4_task1 = PythonOperator( + task_id="test_dagrun_fail", + python_callable=fail, + ) + dag4_task2 = PythonOperator( + task_id="test_dagrun_succeed", trigger_rule=TriggerRule.ALL_FAILED, python_callable=print + ) + dag4_task2.set_upstream(dag4_task1) + return dag + + +def get_dag_fail(dag_maker): + with dag_maker( + dag_id="test_dagrun_states_fail", + schedule=timedelta(days=1), + ) as dag: + dag3_task1 = PythonOperator(task_id="to_fail", python_callable=fail) + dag3_task2 = PythonOperator(task_id="to_succeed", python_callable=print) + dag3_task2.set_upstream(dag3_task1) + return dag + + +def get_dag_fail_root(dag_maker): + with dag_maker( + dag_id="test_dagrun_states_root_fail", + schedule=timedelta(days=1), + ) as dag: + PythonOperator(task_id="test_dagrun_succeed", python_callable=print) + PythonOperator( + task_id="test_dagrun_fail", + python_callable=fail, + ) + return dag + + @pytest.mark.system("core") class TestExampleDagsSystem(SystemTest): @pytest.mark.parametrize( - "dag_id", + "module", ["example_bash_operator", "example_branch_operator", "tutorial_dag", "example_dag_decorator"], ) - def test_dag_example(self, dag_id): - self.run_dag(dag_id=dag_id) + def test_dag_example(self, module): + test_run = import_string(f"airflow.example_dags.{module}.test_run") + test_run() + + @pytest.mark.parametrize( + "factory, expected", + [ + (get_dag_fail, "failed"), + (get_dag_fail_root, "failed"), + (get_dag_success, "success"), + ], + ) + def test_dag_run_final_state(self, factory, expected, dag_maker, session): + """ + These tests are migrated tests that were added in PR #1289 + which fixed issue #1225. + + I would be very surprised if these things were not covered elsewhere already + but, just in case, I'm migrating them to system tests. + """ + dag = factory(dag_maker) + run = get_test_run(dag) + if expected == "success": + run() + else: + with pytest.raises(AssertionError, match="The system test failed"): + run() + dr = session.scalar(select(DagRun)) + assert dr.state == expected + + def test_dag_root_task_start_date_future(self, dag_maker, session): + """ + These tests are migrated tests that were added in PR #1289 + which fixed issue #1225. + + This one tests what happens when there's a DAG with a root task with a future start date. + + The DAG should run, but no TI should be created for the task whose start date is in the future. 
+ """ + exec_date = pendulum.datetime(2021, 1, 1) + fut_start_date = pendulum.datetime(2021, 2, 1) + with dag_maker( + dag_id="dagrun_states_root_future", + schedule=timedelta(days=1), + catchup=False, + ) as dag: + PythonOperator( + task_id="current", + python_callable=lambda: print("hello"), + ) + PythonOperator( + task_id="future", + python_callable=lambda: print("hello"), + start_date=fut_start_date, + ) + run = get_test_run(dag, execution_date=exec_date) + run() + dr = session.scalar(select(DagRun)) + tis = dr.task_instances + assert dr.state == DagRunState.SUCCESS + assert len(tis) == 1 + assert tis[0].task_id == "current" diff --git a/tests/dags/test_future_start_date.py b/tests/dags/test_future_start_date.py new file mode 100644 index 0000000000000..dadfbff600f60 --- /dev/null +++ b/tests/dags/test_future_start_date.py @@ -0,0 +1,41 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +from datetime import timedelta + +import pendulum + +from airflow.models.dag import DAG +from airflow.operators.empty import EmptyOperator +from airflow.operators.python import PythonOperator + +exec_date = pendulum.datetime(2021, 1, 1) +fut_start_date = pendulum.datetime(2021, 2, 1) +with DAG( + dag_id="test_dagrun_states_root_future", + schedule=timedelta(days=1), + catchup=True, + start_date=exec_date, +) as dag: + EmptyOperator(task_id="current") + PythonOperator( + task_id="future", + python_callable=lambda: print("hello"), + start_date=fut_start_date, + ) diff --git a/tests/dags/test_issue_1225.py b/tests/dags/test_issue_1225.py deleted file mode 100644 index 96a3ad156269e..0000000000000 --- a/tests/dags/test_issue_1225.py +++ /dev/null @@ -1,149 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -""" -DAG designed to test what happens when a DAG with pooled tasks is run -by a BackfillJob. -Addresses issue #1225. 
-""" - -from __future__ import annotations - -from datetime import datetime, timedelta - -from airflow.models.dag import DAG -from airflow.operators.empty import EmptyOperator -from airflow.operators.python import PythonOperator -from airflow.utils.trigger_rule import TriggerRule - -DEFAULT_DATE = datetime(2016, 1, 1) -default_args = dict(start_date=DEFAULT_DATE, owner="airflow") - - -def fail(): - raise ValueError("Expected failure.") - - -# DAG tests backfill with pooled tasks -# Previously backfill would queue the task but never run it -dag1 = DAG( - dag_id="test_backfill_pooled_task_dag", - schedule=timedelta(days=1), - default_args=default_args, -) -dag1_task1 = EmptyOperator( - task_id="test_backfill_pooled_task", - dag=dag1, - pool="test_backfill_pooled_task_pool", -) - -# dag2 has been moved to test_prev_dagrun_dep.py - -# DAG tests that a Dag run that doesn't complete is marked failed -dag3 = DAG( - dag_id="test_dagrun_states_fail", - schedule=timedelta(days=1), - default_args=default_args, -) -dag3_task1 = PythonOperator(task_id="test_dagrun_fail", dag=dag3, python_callable=fail) -dag3_task2 = EmptyOperator( - task_id="test_dagrun_succeed", - dag=dag3, -) -dag3_task2.set_upstream(dag3_task1) - -# DAG tests that a Dag run that completes but has a failure is marked success -dag4 = DAG( - dag_id="test_dagrun_states_success", - schedule=timedelta(days=1), - default_args=default_args, -) -dag4_task1 = PythonOperator( - task_id="test_dagrun_fail", - dag=dag4, - python_callable=fail, -) -dag4_task2 = EmptyOperator(task_id="test_dagrun_succeed", dag=dag4, trigger_rule=TriggerRule.ALL_FAILED) -dag4_task2.set_upstream(dag4_task1) - -# DAG tests that a Dag run that completes but has a root failure is marked fail -dag5 = DAG( - dag_id="test_dagrun_states_root_fail", - schedule=timedelta(days=1), - default_args=default_args, -) -dag5_task1 = EmptyOperator( - task_id="test_dagrun_succeed", - dag=dag5, -) -dag5_task2 = PythonOperator( - task_id="test_dagrun_fail", - dag=dag5, - python_callable=fail, -) - -# DAG tests that a Dag run that is deadlocked with no states is failed -dag6 = DAG( - dag_id="test_dagrun_states_deadlock", - schedule=timedelta(days=1), - default_args=default_args, -) -dag6_task1 = EmptyOperator( - task_id="test_depends_on_past", - depends_on_past=True, - dag=dag6, -) -dag6_task2 = EmptyOperator( - task_id="test_depends_on_past_2", - depends_on_past=True, - dag=dag6, -) -dag6_task2.set_upstream(dag6_task1) - - -# DAG tests that a Dag run that doesn't complete but has a root failure is marked running -dag8 = DAG( - dag_id="test_dagrun_states_root_fail_unfinished", - schedule=timedelta(days=1), - default_args=default_args, -) -dag8_task1 = EmptyOperator( - task_id="test_dagrun_unfinished", # The test will unset the task instance state after - # running this test - dag=dag8, -) -dag8_task2 = PythonOperator( - task_id="test_dagrun_fail", - dag=dag8, - python_callable=fail, -) - -# DAG tests that a Dag run that completes but has a root in the future is marked as success -dag9 = DAG( - dag_id="test_dagrun_states_root_future", - schedule=timedelta(days=1), - default_args=default_args, -) -dag9_task1 = EmptyOperator( - task_id="current", - dag=dag9, -) -dag9_task2 = EmptyOperator( - task_id="future", - dag=dag9, - start_date=DEFAULT_DATE + timedelta(days=1), -) diff --git a/tests/jobs/test_backfill_job.py b/tests/jobs/test_backfill_job.py index f1bb6f17d05c4..dead9be86230b 100644 --- a/tests/jobs/test_backfill_job.py +++ b/tests/jobs/test_backfill_job.py @@ -32,7 +32,6 @@ from 
airflow.cli import cli_parser from airflow.exceptions import ( AirflowException, - AirflowTaskTimeout, BackfillUnfinished, DagConcurrencyLimitReached, NoAvailablePoolSlot, @@ -54,7 +53,6 @@ from airflow.utils import timezone from airflow.utils.session import create_session from airflow.utils.state import DagRunState, State, TaskInstanceState -from airflow.utils.timeout import timeout from airflow.utils.trigger_rule import TriggerRule from airflow.utils.types import DagRunType from tests.listeners import dag_listener @@ -1070,34 +1068,6 @@ def test_backfill_ordered_concurrent_execute(self, dag_maker, mock_executor): ], ] - def test_backfill_pooled_tasks(self): - """ - Test that queued tasks are executed by BackfillJobRunner - """ - session = settings.Session() - pool = Pool(pool="test_backfill_pooled_task_pool", slots=1, include_deferred=False) - session.add(pool) - session.commit() - session.close() - - dag = self.dagbag.get_dag("test_backfill_pooled_task_dag") - dag.clear() - - job = Job() - job_runner = BackfillJobRunner(job=job, dag=dag, start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) - - # run with timeout because this creates an infinite loop if not - # caught - try: - with timeout(seconds=20): - run_job(job=job, execute_callable=job_runner._execute) - except AirflowTaskTimeout: - logger.info("Timeout while waiting for task to complete") - run_id = f"backfill__{DEFAULT_DATE.isoformat()}" - ti = TI(task=dag.get_task("test_backfill_pooled_task"), run_id=run_id) - ti.refresh_from_db() - assert ti.state == State.SUCCESS - @pytest.mark.parametrize("ignore_depends_on_past", [True, False]) def test_backfill_depends_on_past_works_independently_on_ignore_depends_on_past( self, ignore_depends_on_past, mock_executor diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py index a5748ebaeef9c..639a3528fad8c 100644 --- a/tests/jobs/test_scheduler_job.py +++ b/tests/jobs/test_scheduler_job.py @@ -43,7 +43,8 @@ from airflow.callbacks.database_callback_sink import DatabaseCallbackSink from airflow.callbacks.pipe_callback_sink import PipeCallbackSink from airflow.dag_processing.manager import DagFileProcessorAgent -from airflow.exceptions import AirflowException, RemovedInAirflow3Warning +from airflow.decorators import task +from airflow.exceptions import AirflowException from airflow.executors.base_executor import BaseExecutor from airflow.executors.executor_constants import MOCK_EXECUTOR from airflow.executors.executor_loader import ExecutorLoader @@ -2828,128 +2829,33 @@ def test_do_not_schedule_removed_task(self, dag_maker): assert [] == res - @provide_session - def evaluate_dagrun( - self, - dag_id, - expected_task_states, # dict of task_id: state - dagrun_state, - run_kwargs=None, - advance_execution_date=False, - session=None, - ): - """ - Helper for testing DagRun states with simple two-task DAGs. - This is hackish: a dag run is created but its tasks are - run by a backfill. 
- """ - - # todo: AIP-78 remove along with DAG.run() - # this only tests the backfill job runner, not the scheduler - - if run_kwargs is None: - run_kwargs = {} - - dag = self.dagbag.get_dag(dag_id) - dagrun_info = dag.next_dagrun_info(None) - assert dagrun_info is not None - data_interval = dag.infer_automated_data_interval(DEFAULT_LOGICAL_DATE) - triggered_by_kwargs = {"triggered_by": DagRunTriggeredByType.TEST} if AIRFLOW_V_3_0_PLUS else {} - dr = dag.create_dagrun( - run_type=DagRunType.SCHEDULED, - execution_date=dagrun_info.logical_date, - state=None, - session=session, - data_interval=data_interval, - **triggered_by_kwargs, - ) - - if advance_execution_date: - # run a second time to schedule a dagrun after the start_date - dr = dag.create_dagrun( - run_type=DagRunType.SCHEDULED, - execution_date=dr.data_interval_end, - state=None, - session=session, - data_interval=data_interval, - **triggered_by_kwargs, - ) - ex_date = dr.execution_date - - for tid, state in expected_task_states.items(): - if state == State.FAILED: - self.null_exec.mock_task_fail(dag_id, tid, dr.run_id) - - try: - dag = DagBag().get_dag(dag.dag_id) - # This needs a _REAL_ dag, not the serialized version - assert not isinstance(dag, SerializedDAG) - # TODO: Can this be replaced with `self.run_scheduler_until_dagrun_terminal. `dag.run` isn't - # great to use here as it uses BackfillJobRunner! - for _ in _mock_executor(self.null_exec): - dag.run(start_date=ex_date, end_date=ex_date, **run_kwargs) - except AirflowException: - pass - - # load dagrun - dr = DagRun.find(dag_id=dag_id, execution_date=ex_date, session=session) - dr = dr[0] - dr.dag = dag - - assert dr.state == dagrun_state - - # test tasks - for task_id, expected_state in expected_task_states.items(): - ti = dr.get_task_instance(task_id) - assert ti.state == expected_state - - def test_dagrun_fail(self): + @pytest.mark.parametrize( + "ti_states, run_state", + [ + (["failed", "success"], "failed"), + (["success", "success"], "success"), + ], + ) + def test_dagrun_state_correct(self, ti_states, run_state, dag_maker, session): """ DagRuns with one failed and one incomplete root task -> FAILED """ - # todo: AIP-78 remove along with DAG.run() - # this only tests the backfill job runner, not the scheduler - with pytest.warns(RemovedInAirflow3Warning): - self.evaluate_dagrun( - dag_id="test_dagrun_states_fail", - expected_task_states={ - "test_dagrun_fail": State.FAILED, - "test_dagrun_succeed": State.UPSTREAM_FAILED, - }, - dagrun_state=State.FAILED, - ) + with dag_maker(): - def test_dagrun_success(self): - """ - DagRuns with one failed and one successful root task -> SUCCESS - """ - # todo: AIP-78 remove along with DAG.run() - # this only tests the backfill job runner, not the scheduler - with pytest.warns(RemovedInAirflow3Warning): - self.evaluate_dagrun( - dag_id="test_dagrun_states_success", - expected_task_states={ - "test_dagrun_fail": State.FAILED, - "test_dagrun_succeed": State.SUCCESS, - }, - dagrun_state=State.SUCCESS, - ) + @task + def my_task(): ... 
- def test_dagrun_root_fail(self): - """ - DagRuns with one successful and one failed root task -> FAILED - """ - # todo: AIP-78 remove along with DAG.run() - # this only tests the backfill job runner, not the scheduler - with pytest.warns(RemovedInAirflow3Warning): - self.evaluate_dagrun( - dag_id="test_dagrun_states_root_fail", - expected_task_states={ - "test_dagrun_succeed": State.SUCCESS, - "test_dagrun_fail": State.FAILED, - }, - dagrun_state=State.FAILED, - ) + for _ in ti_states: + my_task() + dr = dag_maker.create_dagrun(state="running", triggered_by=DagRunTriggeredByType.TIMETABLE) + for idx, state in enumerate(ti_states): + dr.task_instances[idx].state = state + session.commit() + scheduler_job = Job(executor=self.null_exec) + self.job_runner = SchedulerJobRunner(job=scheduler_job) + self.job_runner.processor_agent = mock.MagicMock() + self.job_runner._do_scheduling(session) + assert session.query(DagRun).one().state == run_state def test_dagrun_root_after_dagrun_unfinished(self, mock_executor): """ @@ -2963,57 +2869,15 @@ def test_dagrun_root_after_dagrun_unfinished(self, mock_executor): dag.sync_to_db() scheduler_job = Job() - self.job_runner = SchedulerJobRunner(job=scheduler_job, num_runs=1, subdir=dag.fileloc) + self.job_runner = SchedulerJobRunner(job=scheduler_job, num_runs=2, subdir=dag.fileloc) run_job(scheduler_job, execute_callable=self.job_runner._execute) - first_run = DagRun.find(dag_id=dag_id, execution_date=DEFAULT_DATE)[0] + first_run = DagRun.find(dag_id=dag_id)[0] ti_ids = [(ti.task_id, ti.state) for ti in first_run.get_task_instances()] assert ti_ids == [("current", State.SUCCESS)] assert first_run.state in [State.SUCCESS, State.RUNNING] - def test_dagrun_deadlock_ignore_depends_on_past_advance_ex_date(self): - """ - DagRun is marked a success if ignore_first_depends_on_past=True - - Test that an otherwise-deadlocked dagrun is marked as a success - if ignore_first_depends_on_past=True and the dagrun execution_date - is after the start_date. - """ - # todo: AIP-78 remove along with DAG.run() - # this only tests the backfill job runner, not the scheduler - with pytest.warns(RemovedInAirflow3Warning): - self.evaluate_dagrun( - dag_id="test_dagrun_states_deadlock", - expected_task_states={ - "test_depends_on_past": State.SUCCESS, - "test_depends_on_past_2": State.SUCCESS, - }, - dagrun_state=State.SUCCESS, - advance_execution_date=True, - run_kwargs=dict(ignore_first_depends_on_past=True), - ) - - def test_dagrun_deadlock_ignore_depends_on_past(self): - """ - Test that ignore_first_depends_on_past doesn't affect results - (this is the same test as - test_dagrun_deadlock_ignore_depends_on_past_advance_ex_date except - that start_date == execution_date so depends_on_past is irrelevant). 
- """ - # todo: AIP-78 remove along with DAG.run() - # this only tests the backfill job runner, not the scheduler - with pytest.warns(RemovedInAirflow3Warning): - self.evaluate_dagrun( - dag_id="test_dagrun_states_deadlock", - expected_task_states={ - "test_depends_on_past": State.SUCCESS, - "test_depends_on_past_2": State.SUCCESS, - }, - dagrun_state=State.SUCCESS, - run_kwargs=dict(ignore_first_depends_on_past=True), - ) - @pytest.mark.parametrize( "configs", [ @@ -3136,9 +3000,14 @@ def test_scheduler_multiprocessing(self, configs): Test that the scheduler can successfully queue multiple dags in parallel """ with conf_vars(configs): - dag_ids = ["test_start_date_scheduling", "test_dagrun_states_success"] + dag_ids = [ + "test_start_date_scheduling", + "test_task_start_date_scheduling", + ] for dag_id in dag_ids: dag = self.dagbag.get_dag(dag_id) + if not dag: + raise ValueError(f"could not find dag {dag_id}") dag.clear() scheduler_job = Job( diff --git a/tests/models/test_dag.py b/tests/models/test_dag.py index 997ef06329fde..67dc699fc3c8b 100644 --- a/tests/models/test_dag.py +++ b/tests/models/test_dag.py @@ -25,7 +25,6 @@ import re import weakref from datetime import timedelta -from importlib import reload from pathlib import Path from typing import TYPE_CHECKING from unittest import mock @@ -45,12 +44,8 @@ AirflowException, DuplicateTaskIdFound, ParamValidationError, - RemovedInAirflow3Warning, UnknownExecutorException, ) -from airflow.executors import executor_loader -from airflow.executors.local_executor import LocalExecutor -from airflow.executors.sequential_executor import SequentialExecutor from airflow.models.asset import ( AssetAliasModel, AssetDagRunQueue, @@ -2740,20 +2735,6 @@ def test_asset_expression(self, session: Session) -> None: ] } - @mock.patch("airflow.models.dag.run_job") - def test_dag_executors(self, run_job_mock): - # todo: AIP-78 remove along with DAG.run() - # this only tests the backfill job runner, not the scheduler - with pytest.warns(RemovedInAirflow3Warning): - dag = DAG(dag_id="test", schedule=None) - reload(executor_loader) - with conf_vars({("core", "executor"): "SequentialExecutor"}): - dag.run() - assert isinstance(run_job_mock.call_args_list[0].kwargs["job"].executor, SequentialExecutor) - - dag.run(local=True) - assert isinstance(run_job_mock.call_args_list[1].kwargs["job"].executor, LocalExecutor) - class TestQueries: def setup_method(self) -> None: diff --git a/tests/models/test_xcom_arg.py b/tests/models/test_xcom_arg.py index b161020d1fb91..fbdd500661d22 100644 --- a/tests/models/test_xcom_arg.py +++ b/tests/models/test_xcom_arg.py @@ -23,7 +23,6 @@ from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.types import NOTSET -from dev.tests_common.test_utils.config import conf_vars from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs pytestmark = pytest.mark.db_test @@ -146,7 +145,6 @@ def test_xcom_not_iterable(self, dag_maker): @pytest.mark.system("core") class TestXComArgRuntime: - @conf_vars({("core", "executor"): "DebugExecutor"}) def test_xcom_pass_to_op(self, dag_maker): with dag_maker(dag_id="test_xcom_pass_to_op") as dag: operator = PythonOperator( @@ -161,9 +159,8 @@ def test_xcom_pass_to_op(self, dag_maker): task_id="assert_is_value_1", ) operator >> operator2 - dag.run() + dag.test() - @conf_vars({("core", "executor"): "DebugExecutor"}) def test_xcom_push_and_pass(self, dag_maker): def push_xcom_value(key, value, **context): ti = context["task_instance"] @@ -182,7 +179,7 @@ def 
push_xcom_value(key, value, **context): op_args=[xarg], ) op1 >> op2 - dag.run() + dag.test() @pytest.mark.skip_if_database_isolation_mode # Does not work in db isolation mode From e0b7077745523cacd34176c4b6a1e0e887a2c41c Mon Sep 17 00:00:00 2001 From: Daniel Standish <15932138+dstandish@users.noreply.github.com> Date: Thu, 10 Oct 2024 14:40:10 -0700 Subject: [PATCH 066/125] Add test for behavior for paused backfill (#42837) ----- Co-authored-by: Kaxil Naik --- tests/jobs/test_scheduler_job.py | 90 +++++++++++++++++++++++++++++++- 1 file changed, 89 insertions(+), 1 deletion(-) diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py index 639a3528fad8c..78c3acdce0de6 100644 --- a/tests/jobs/test_scheduler_job.py +++ b/tests/jobs/test_scheduler_job.py @@ -53,7 +53,7 @@ from airflow.jobs.local_task_job_runner import LocalTaskJobRunner from airflow.jobs.scheduler_job_runner import SchedulerJobRunner from airflow.models.asset import AssetDagRunQueue, AssetEvent, AssetModel -from airflow.models.backfill import _create_backfill +from airflow.models.backfill import Backfill, _create_backfill from airflow.models.dag import DAG, DagModel from airflow.models.dagbag import DagBag from airflow.models.dagrun import DagRun @@ -4960,6 +4960,94 @@ def _running_counts(): assert session.scalar(select(func.count()).select_from(DagRun)) == 46 assert session.scalar(select(func.count()).where(DagRun.dag_id == dag1_dag_id)) == 36 + @pytest.mark.parametrize( + "pause_it, expected_running", + [ + (True, 0), + (False, 3), + ], + ) + def test_backfill_runs_not_started_when_backfill_paused( + self, pause_it, expected_running, dag_maker, session + ): + """ + When backfill is paused, will not start. + """ + dag1_dag_id = "test_dag1" + with dag_maker( + dag_id=dag1_dag_id, + start_date=DEFAULT_DATE, + schedule=timedelta(days=1), + max_active_runs=1, + ): + EmptyOperator(task_id="mytask") + + def _running_counts(): + dag1_non_b_running = ( + session.query(func.count(DagRun.id)) + .filter( + DagRun.dag_id == dag1_dag_id, + DagRun.state == State.RUNNING, + DagRun.run_type != DagRunType.BACKFILL_JOB, + ) + .scalar() + ) + dag1_b_running = ( + session.query(func.count(DagRun.id)) + .filter( + DagRun.dag_id == dag1_dag_id, + DagRun.state == State.RUNNING, + DagRun.run_type == DagRunType.BACKFILL_JOB, + ) + .scalar() + ) + total_running_count = ( + session.query(func.count(DagRun.id)).filter(DagRun.state == State.RUNNING).scalar() + ) + return dag1_non_b_running, dag1_b_running, total_running_count + + scheduler_job = Job() + self.job_runner = SchedulerJobRunner(job=scheduler_job, subdir=os.devnull) + scheduler_job.executor = MockExecutor(do_update=False) + self.job_runner.processor_agent = mock.MagicMock(spec=DagFileProcessorAgent) + + from_date = pendulum.parse("2021-01-01") + to_date = pendulum.parse("2021-01-06") + b = _create_backfill( + dag_id=dag1_dag_id, + from_date=from_date, + to_date=to_date, + max_active_runs=3, + reverse=False, + dag_run_conf={}, + ) + dag1_non_b_running, dag1_b_running, total_running = _running_counts() + + # initial state -- nothing is running + assert dag1_non_b_running == 0 + assert dag1_b_running == 0 + assert total_running == 0 + assert session.query(func.count(DagRun.id)).scalar() == 6 + assert session.scalar(select(func.count()).where(DagRun.dag_id == dag1_dag_id)) == 6 + + if pause_it: + b = session.get(Backfill, b.id) + b.is_paused = True + + session.commit() + + # now let's run scheduler once + self.job_runner._start_queued_dagruns(session) + session.flush() + + 
assert DagRun.DEFAULT_DAGRUNS_TO_EXAMINE == 20 + dag1_non_b_running, dag1_b_running, total_running = _running_counts() + assert dag1_non_b_running == 0 + assert dag1_b_running == expected_running + assert total_running == expected_running + assert session.scalar(select(func.count()).select_from(DagRun)) == 6 + assert session.scalar(select(func.count()).where(DagRun.dag_id == dag1_dag_id)) == 6 + def test_start_queued_dagruns_do_follow_execution_date_order(self, dag_maker): session = settings.Session() with dag_maker("test_dag1", max_active_runs=1): From df4df2a8f92869c4c47863935da6def37ba7373e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Josef=20=C5=A0im=C3=A1nek?= Date: Fri, 11 Oct 2024 00:21:50 +0200 Subject: [PATCH 067/125] Use url_from_endpoint inside HttpHook. (#42785) --- providers/src/airflow/providers/http/hooks/http.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/providers/src/airflow/providers/http/hooks/http.py b/providers/src/airflow/providers/http/hooks/http.py index a53dd7566cefc..05b432626b8e3 100644 --- a/providers/src/airflow/providers/http/hooks/http.py +++ b/providers/src/airflow/providers/http/hooks/http.py @@ -169,7 +169,7 @@ def run( session = self.get_conn(headers) - url = _url_from_endpoint(self.base_url, endpoint) + url = self.url_from_endpoint(endpoint) if self.tcp_keep_alive: keep_alive_adapter = TCPKeepAliveAdapter( From 61aadddf5158ee30b7f375dbd947b16e4afa3942 Mon Sep 17 00:00:00 2001 From: Kaxil Naik Date: Thu, 10 Oct 2024 23:50:36 +0100 Subject: [PATCH 068/125] Fix typo in Breeze (#42919) A couple of minor typos due to newlines --- dev/breeze/doc/images/output_testing.svg | 4 ++-- dev/breeze/doc/images/output_testing.txt | 2 +- dev/breeze/doc/images/output_testing_non-db-tests.svg | 2 +- dev/breeze/doc/images/output_testing_non-db-tests.txt | 2 +- dev/breeze/doc/images/output_testing_tests.svg | 2 +- dev/breeze/doc/images/output_testing_tests.txt | 2 +- dev/breeze/src/airflow_breeze/commands/testing_commands.py | 6 +++--- 7 files changed, 10 insertions(+), 10 deletions(-) diff --git a/dev/breeze/doc/images/output_testing.svg b/dev/breeze/doc/images/output_testing.svg index 228a97056c95a..c85e5a60979a7 100644 --- a/dev/breeze/doc/images/output_testing.svg +++ b/dev/breeze/doc/images/output_testing.svg @@ -133,7 +133,7 @@
╭─ Testing ────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ tests               Run the specified unit tests. This is a low level testing command that allows you to run       various kind of tests subset with a number of options. You can also use dedicated commands     -suchus db_tests, non_db_tests, integration_tests for more opinionated test suite execution.    +such as db_tests, non_db_tests, integration_tests for more opinionated test suite execution.   integration-tests   Run the specified integration tests.                                                           helm-tests          Run Helm chart tests.                                                                          docker-compose-testsRun docker-compose tests.                                                                      @@ -142,7 +142,7 @@ db-tests    Run all (default) or specified DB-bound unit tests. This is a dedicated command that only runs DB      tests and it runs them in parallel via splitting tests by test types into separate containers with     separate database started for each container.                                                          -non-db-testsRun all (default) or specified Non-DB unit tests. This is a dedicated command that onlyruns Non-DB     +non-db-testsRun all (default) or specified Non-DB unit tests. This is a dedicated command that only runs Non-DB    tests and it runs them in parallel via pytest-xdist in single container, with `none` backend set.      ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_testing.txt b/dev/breeze/doc/images/output_testing.txt index 3f7450936bae4..5f088aae9e13e 100644 --- a/dev/breeze/doc/images/output_testing.txt +++ b/dev/breeze/doc/images/output_testing.txt @@ -1 +1 @@ -d81f2195fbceabe4fc7d17bc3b83c5b1 +1447b50a7f4c843b01e92358a6b2e072 diff --git a/dev/breeze/doc/images/output_testing_non-db-tests.svg b/dev/breeze/doc/images/output_testing_non-db-tests.svg index 28fc44c028300..da43621dd6931 100644 --- a/dev/breeze/doc/images/output_testing_non-db-tests.svg +++ b/dev/breeze/doc/images/output_testing_non-db-tests.svg @@ -369,7 +369,7 @@ Usage:breeze testing non-db-tests[OPTIONS] -Run all (default) or specified Non-DB unit tests. This is a dedicated command that onlyruns Non-DB tests and it runs  +Run all (default) or specified Non-DB unit tests. This is a dedicated command that only runs Non-DB tests and it runs  them in parallel via pytest-xdist in single container, with `none` backend set. ╭─ Select test types to run ───────────────────────────────────────────────────────────────────────────────────────────╮ diff --git a/dev/breeze/doc/images/output_testing_non-db-tests.txt b/dev/breeze/doc/images/output_testing_non-db-tests.txt index 8be059e551195..1080b5a377717 100644 --- a/dev/breeze/doc/images/output_testing_non-db-tests.txt +++ b/dev/breeze/doc/images/output_testing_non-db-tests.txt @@ -1 +1 @@ -3120758ec7ae00c88305cca093595aca +2cc222da8b9f31b93b527220b76b48a2 diff --git a/dev/breeze/doc/images/output_testing_tests.svg b/dev/breeze/doc/images/output_testing_tests.svg index a5da2b4d53ff5..c20e2ef16b243 100644 --- a/dev/breeze/doc/images/output_testing_tests.svg +++ b/dev/breeze/doc/images/output_testing_tests.svg @@ -457,7 +457,7 @@ Usage:breeze testing tests[OPTIONS] [EXTRA_PYTEST_ARGS]... Run the specified unit tests. 
This is a low level testing command that allows you to run various kind of tests subset  -with a number of options. You can also use dedicated commands suchus db_tests, non_db_tests, integration_tests for  +with a number of options. You can also use dedicated commands such as db_tests, non_db_tests, integration_tests for  more opinionated test suite execution. ╭─ Select test types to run (tests can also be selected by command args individually) ─────────────────────────────────╮ diff --git a/dev/breeze/doc/images/output_testing_tests.txt b/dev/breeze/doc/images/output_testing_tests.txt index 7e33718d66c69..d19f6cf5abdae 100644 --- a/dev/breeze/doc/images/output_testing_tests.txt +++ b/dev/breeze/doc/images/output_testing_tests.txt @@ -1 +1 @@ -2be847f8a1f4e2b80c6da1e4433203aa +15002aa129ce25039921f800fb1cf744 diff --git a/dev/breeze/src/airflow_breeze/commands/testing_commands.py b/dev/breeze/src/airflow_breeze/commands/testing_commands.py index 61b260bea09ae..c3bb4e9193204 100644 --- a/dev/breeze/src/airflow_breeze/commands/testing_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/testing_commands.py @@ -499,8 +499,8 @@ def _verify_parallelism_parameters( @group_for_testing.command( name="tests", help="Run the specified unit tests. This is a low level testing command that allows you to run " - "various kind of tests subset with a number of options. You can also use dedicated commands such" - "us db_tests, non_db_tests, integration_tests for more opinionated test suite execution.", + "various kind of tests subset with a number of options. You can also use dedicated commands such " + "as db_tests, non_db_tests, integration_tests for more opinionated test suite execution.", context_settings=dict( ignore_unknown_options=True, allow_extra_args=True, @@ -624,7 +624,7 @@ def command_for_db_tests(**kwargs): @group_for_testing.command( name="non-db-tests", - help="Run all (default) or specified Non-DB unit tests. This is a dedicated command that only" + help="Run all (default) or specified Non-DB unit tests. This is a dedicated command that only " "runs Non-DB tests and it runs them in parallel via pytest-xdist in single container, " "with `none` backend set.", context_settings=dict( From 6c4d67f34e72410bd60e8c83fdd5085634e3516a Mon Sep 17 00:00:00 2001 From: Kim Date: Fri, 11 Oct 2024 00:26:51 -0600 Subject: [PATCH 069/125] Docs: Add templating info to TaskFlow tutorial (#42887) --- .../sql/tutorial_taskflow_template.sql | 23 ++++ .../tutorial_taskflow_templates.py | 107 ++++++++++++++++++ docs/apache-airflow/tutorial/taskflow.rst | 56 +++++++++ 3 files changed, 186 insertions(+) create mode 100644 airflow/example_dags/sql/tutorial_taskflow_template.sql create mode 100644 airflow/example_dags/tutorial_taskflow_templates.py diff --git a/airflow/example_dags/sql/tutorial_taskflow_template.sql b/airflow/example_dags/sql/tutorial_taskflow_template.sql new file mode 100644 index 0000000000000..375c39eac610b --- /dev/null +++ b/airflow/example_dags/sql/tutorial_taskflow_template.sql @@ -0,0 +1,23 @@ +/* + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. +*/ + +select * from test_data +where 1=1 + and run_id = '{{ run_id }}' + and something_else = '{{ params.foobar }}' diff --git a/airflow/example_dags/tutorial_taskflow_templates.py b/airflow/example_dags/tutorial_taskflow_templates.py new file mode 100644 index 0000000000000..925f60524b5ea --- /dev/null +++ b/airflow/example_dags/tutorial_taskflow_templates.py @@ -0,0 +1,107 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +# [START tutorial] +# [START import_module] +import pendulum + +from airflow.decorators import dag, task +from airflow.operators.python import get_current_context + +# [END import_module] + + +# [START instantiate_dag] +@dag( + schedule="@daily", + start_date=pendulum.datetime(2021, 1, 1, tz="UTC"), + catchup=False, + tags=["example"], + params={"foobar": "param_from_dag", "other_param": "from_dag"}, +) +def tutorial_taskflow_templates(): + """ + ### TaskFlow API Tutorial Documentation + This is a simple data pipeline example which demonstrates the use of + the templates in the TaskFlow API. + Documentation that goes along with the Airflow TaskFlow API tutorial is + located + [here](https://airflow.apache.org/docs/apache-airflow/stable/tutorial_taskflow_api.html) + """ + # [END instantiate_dag] + + # [START template_test] + @task( + # Causes variables that end with `.sql` to be read and templates + # within to be rendered. + templates_exts=[".sql"], + ) + def template_test(sql, test_var, data_interval_end): + context = get_current_context() + + # Will print... + # select * from test_data + # where 1=1 + # and run_id = 'scheduled__2024-10-09T00:00:00+00:00' + # and something_else = 'param_from_task' + print(f"sql: {sql}") + + # Will print `scheduled__2024-10-09T00:00:00+00:00` + print(f"test_var: {test_var}") + + # Will print `2024-10-10 00:00:00+00:00`. + # Note how we didn't pass this value when calling the task. Instead + # it was passed by the decorator from the context + print(f"data_interval_end: {data_interval_end}") + + # Will print... 
+ # run_id: scheduled__2024-10-09T00:00:00+00:00; params.other_param: from_dag + template_str = "run_id: {{ run_id }}; params.other_param: {{ params.other_param }}" + rendered_template = context["task"].render_template( + template_str, + context, + ) + print(f"rendered template: {rendered_template}") + + # Will print the full context dict + print(f"context: {context}") + + # [END template_test] + + # [START main_flow] + template_test.override( + # Will be merged with the dict defined in the dag + # and override existing parameters. + # + # Must be passed into the decorator's parameters + # through `.override()` not into the actual task + # function + params={"foobar": "param_from_task"}, + )( + sql="sql/test.sql", + test_var="{{ run_id }}", + ) + # [END main_flow] + + +# [START dag_invocation] +tutorial_taskflow_templates() +# [END dag_invocation] + +# [END tutorial] diff --git a/docs/apache-airflow/tutorial/taskflow.rst b/docs/apache-airflow/tutorial/taskflow.rst index e15e1c78045c2..37d0d93f59994 100644 --- a/docs/apache-airflow/tutorial/taskflow.rst +++ b/docs/apache-airflow/tutorial/taskflow.rst @@ -629,6 +629,62 @@ method. Current context is accessible only during the task execution. The context is not accessible during ``pre_execute`` or ``post_execute``. Calling this method outside execution context will raise an error. +Using templates in decorated tasks +---------------------------------------------- + +Arguments passed to your decorated function are automatically templated. + +You can also use the ``templates_exts`` parameter to template entire files. + +.. code-block:: python + + @task(templates_exts=[".sql"]) + def template_test(sql): + print(f"sql: {sql}") + + + template_test(sql="sql/test.sql") + +This will read the content of ``sql/test.sql`` and replace all template variables. You can also pass a list of files and all of them will be templated. + +You can pass additional parameters to the template engine through `the params parameter `_. + +However, the ``params`` parameter must be passed to the decorator and not to your function directly, such as ``@task(templates_exts=['.sql'], params={'my_param': 'wow'})``, and can then be used with ``{{ params.my_param }}`` in your templated files and function parameters. + +Alternatively, you can also pass it using the ``.override()`` method: + +.. code-block:: python + + @task() + def template_test(input_var): + print(f"input_var: {input_var}") + + + template_test.override(params={"my_param": "wow"})( + input_var="my param is: {{ params.my_param }}", + ) + +Finally, you can also manually render templates: + +.. code-block:: python + + @task(params={"my_param": "wow"}) + def template_test(): + template_str = "run_id: {{ run_id }}; params.my_param: {{ params.my_param }}" + + context = get_current_context() + rendered_template = context["task"].render_template( + template_str, + context, + ) + +Here is a full example that demonstrates everything above: + +.. exampleinclude:: /../../airflow/example_dags/tutorial_taskflow_templates.py :language: python :start-after: [START tutorial] :end-before: [END tutorial] Conditionally skipping tasks ---------------------------- From 7202ee84b3204bfcada7effe34912cfd31712e9f Mon Sep 17 00:00:00 2001 From: Rasnar Date: Fri, 11 Oct 2024 08:37:14 +0200 Subject: [PATCH 070/125] Add possibility to override the conn type for Druid (#42793) * Add possibility to override the conn type for Druid Minor fix, which allows using the schema that is specified in the schema field rather than `http` as the default. 
At the same time, it doesn't change the logic, as any conn_type can still be
selected. Intuitively, it's expected that anything specified in the `schema`
field will take precedence when building the desired URL.

* Add druid endpoint connection from another PR

* Fix missing scheme in test

* Set schema to None where it's unused

  Even though we don't need it directly set, by default the mock will set it
  to an internal object, thus we need to override it to None.

---------

Co-authored-by: Oleg Auckenthaler
---
 .../providers/apache/druid/hooks/druid.py     |  5 ++-
 .../tests/apache/druid/hooks/test_druid.py    | 44 ++++++++++++++++---
 2 files changed, 43 insertions(+), 6 deletions(-)

diff --git a/providers/src/airflow/providers/apache/druid/hooks/druid.py b/providers/src/airflow/providers/apache/druid/hooks/druid.py
index ca315b3a2c751..c865adef412fb 100644
--- a/providers/src/airflow/providers/apache/druid/hooks/druid.py
+++ b/providers/src/airflow/providers/apache/druid/hooks/druid.py
@@ -86,7 +86,10 @@ def get_conn_url(self, ingestion_type: IngestionType = IngestionType.BATCH) -> s
         """Get Druid connection url."""
         host = self.conn.host
         port = self.conn.port
-        conn_type = self.conn.conn_type or "http"
+        if self.conn.schema:
+            conn_type = self.conn.schema
+        else:
+            conn_type = self.conn.conn_type or "http"
         if ingestion_type == IngestionType.BATCH:
             endpoint = self.conn.extra_dejson.get("endpoint", "")
         else:
diff --git a/providers/tests/apache/druid/hooks/test_druid.py b/providers/tests/apache/druid/hooks/test_druid.py
index 9befbf37f02a6..f01175942be4a 100644
--- a/providers/tests/apache/druid/hooks/test_druid.py
+++ b/providers/tests/apache/druid/hooks/test_druid.py
@@ -42,9 +42,14 @@ class TestDRuidhook(DruidHook):
                 self.is_sql_based_ingestion = False

             def get_conn_url(self, ingestion_type: IngestionType = IngestionType.BATCH):
+                if self.conn.schema:
+                    conn_type = self.conn.schema
+                else:
+                    conn_type = "http"
+
                 if ingestion_type == IngestionType.MSQ:
-                    return "http://druid-overlord:8081/druid/v2/sql/task"
-                return "http://druid-overlord:8081/druid/indexer/v1/task"
+                    return f"{conn_type}://druid-overlord:8081/druid/v2/sql/task"
+                return f"{conn_type}://druid-overlord:8081/druid/indexer/v1/task"

         self.db_hook = TestDRuidhook()

@@ -257,7 +262,8 @@ def get_conn_url(self, ingestion_type: IngestionType = IngestionType.BATCH):
     def test_conn_property(self, mock_get_connection):
         get_conn_value = MagicMock()
         get_conn_value.host = "test_host"
-        get_conn_value.conn_type = "https"
+        get_conn_value.conn_type = "http"
+        get_conn_value.schema = None
         get_conn_value.port = "1"
         get_conn_value.extra_dejson = {"endpoint": "ingest"}
         mock_get_connection.return_value = get_conn_value
@@ -268,8 +274,22 @@ def test_get_conn_url(self, mock_get_connection):
         get_conn_value = MagicMock()
         get_conn_value.host = "test_host"
-        get_conn_value.conn_type = "https"
+        get_conn_value.conn_type = "http"
+        get_conn_value.schema = None
         get_conn_value.port = "1"
         get_conn_value.extra_dejson = {"endpoint": "ingest"}
         mock_get_connection.return_value = get_conn_value
         hook = DruidHook(timeout=1, max_ingestion_time=5)
+        assert hook.get_conn_url() == "http://test_host:1/ingest"
+
+    @patch("airflow.providers.apache.druid.hooks.druid.DruidHook.get_connection")
+    def test_get_conn_url_with_schema(self, mock_get_connection):
+        get_conn_value = MagicMock()
+        get_conn_value.host = "test_host"
+        get_conn_value.conn_type = "http"
+        get_conn_value.schema = None
        get_conn_value.port = "1"
+        get_conn_value.schema
= "https" get_conn_value.extra_dejson = {"endpoint": "ingest"} mock_get_connection.return_value = get_conn_value hook = DruidHook(timeout=1, max_ingestion_time=5) @@ -279,8 +299,21 @@ def test_get_conn_url(self, mock_get_connection): def test_get_conn_url_with_ingestion_type(self, mock_get_connection): get_conn_value = MagicMock() get_conn_value.host = "test_host" - get_conn_value.conn_type = "https" + get_conn_value.conn_type = "http" + get_conn_value.schema = None + get_conn_value.port = "1" + get_conn_value.extra_dejson = {"endpoint": "ingest", "msq_endpoint": "sql_ingest"} + mock_get_connection.return_value = get_conn_value + hook = DruidHook(timeout=1, max_ingestion_time=5) + assert hook.get_conn_url(IngestionType.MSQ) == "http://test_host:1/sql_ingest" + + @patch("airflow.providers.apache.druid.hooks.druid.DruidHook.get_connection") + def test_get_conn_url_with_ingestion_type_and_schema(self, mock_get_connection): + get_conn_value = MagicMock() + get_conn_value.host = "test_host" + get_conn_value.conn_type = "http" get_conn_value.port = "1" + get_conn_value.schema = "https" get_conn_value.extra_dejson = {"endpoint": "ingest", "msq_endpoint": "sql_ingest"} mock_get_connection.return_value = get_conn_value hook = DruidHook(timeout=1, max_ingestion_time=5) @@ -343,6 +376,7 @@ def setup_method(self): self.conn = conn = MagicMock() self.conn.host = "host" self.conn.port = "1000" + self.conn.schema = None self.conn.conn_type = "druid" self.conn.extra_dejson = {"endpoint": "druid/v2/sql"} self.conn.cursor.return_value = self.cur From 037274398d5380f1ab5e94f6fd60e742840fe8b4 Mon Sep 17 00:00:00 2001 From: Kalyan R Date: Fri, 11 Oct 2024 12:27:09 +0530 Subject: [PATCH 071/125] fix: HttpSensorTrigger to include `method` when serializing (#42925) --- .../airflow/providers/http/triggers/http.py | 1 + providers/tests/http/triggers/test_http.py | 33 ++++++++++++++++++- 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/providers/src/airflow/providers/http/triggers/http.py b/providers/src/airflow/providers/http/triggers/http.py index 59a484b081e04..543cd323b80a3 100644 --- a/providers/src/airflow/providers/http/triggers/http.py +++ b/providers/src/airflow/providers/http/triggers/http.py @@ -167,6 +167,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]: { "endpoint": self.endpoint, "data": self.data, + "method": self.method, "headers": self.headers, "extra_options": self.extra_options, "http_conn_id": self.http_conn_id, diff --git a/providers/tests/http/triggers/test_http.py b/providers/tests/http/triggers/test_http.py index a4f2559876fae..8af78149b24a7 100644 --- a/providers/tests/http/triggers/test_http.py +++ b/providers/tests/http/triggers/test_http.py @@ -30,7 +30,7 @@ from requests.structures import CaseInsensitiveDict from yarl import URL -from airflow.providers.http.triggers.http import HttpTrigger +from airflow.providers.http.triggers.http import HttpSensorTrigger, HttpTrigger from airflow.triggers.base import TriggerEvent HTTP_PATH = "airflow.providers.http.triggers.http.{}" @@ -56,6 +56,18 @@ def trigger(): ) +@pytest.fixture +def sensor_trigger(): + return HttpSensorTrigger( + http_conn_id=TEST_CONN_ID, + endpoint=TEST_ENDPOINT, + method=TEST_METHOD, + headers=TEST_HEADERS, + data=TEST_DATA, + extra_options=TEST_EXTRA_OPTIONS, + ) + + @pytest.fixture def client_response(): client_response = mock.AsyncMock(ClientResponse) @@ -153,3 +165,22 @@ async def test_trigger_on_post_with_data(self, mock_http_post, trigger): assert kwargs["data"] == TEST_DATA assert kwargs["json"] is 
None assert kwargs["params"] is None + + +class TestHttpSensorTrigger: + def test_serialization(self, sensor_trigger): + """ + Asserts that the HttpSensorTrigger correctly serializes its arguments + and classpath. + """ + classpath, kwargs = sensor_trigger.serialize() + assert classpath == "airflow.providers.http.triggers.http.HttpSensorTrigger" + assert kwargs == { + "http_conn_id": TEST_CONN_ID, + "endpoint": TEST_ENDPOINT, + "method": TEST_METHOD, + "headers": TEST_HEADERS, + "data": TEST_DATA, + "extra_options": TEST_EXTRA_OPTIONS, + "poke_interval": 5.0, + } From 23ba1dfbfa7db8b0e78fe908d921f82b2257c983 Mon Sep 17 00:00:00 2001 From: Pierre Jeambrun Date: Fri, 11 Oct 2024 15:31:10 +0800 Subject: [PATCH 072/125] AIP-84 Fix dag display name search (#42863) * Fix dag display name search * Fix CI --- airflow/models/dag.py | 12 ++++++++++++ tests/api_fastapi/views/public/test_dags.py | 8 +++----- 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/airflow/models/dag.py b/airflow/models/dag.py index 5cc5cf4431407..8d85152677e0b 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -3046,6 +3046,18 @@ def set_is_paused(self, is_paused: bool, session=NEW_SESSION) -> None: def dag_display_name(self) -> str: return self._dag_display_property_value or self.dag_id + @dag_display_name.expression # type: ignore[no-redef] + def dag_display_name(self) -> str: + """ + Expression part of the ``dag_display`` name hybrid property. + + :meta private: + """ + return case( + (self._dag_display_property_value.isnot(None), self._dag_display_property_value), + else_=self.dag_id, + ) + @classmethod @internal_api_call @provide_session diff --git a/tests/api_fastapi/views/public/test_dags.py b/tests/api_fastapi/views/public/test_dags.py index 5512f7bb13849..13520b37b3ff2 100644 --- a/tests/api_fastapi/views/public/test_dags.py +++ b/tests/api_fastapi/views/public/test_dags.py @@ -35,7 +35,6 @@ DAG1_ID = "test_dag1" DAG1_DISPLAY_NAME = "display1" DAG2_ID = "test_dag2" -DAG2_DISPLAY_NAME = "display2" DAG2_START_DATE = datetime(2021, 6, 15, tzinfo=timezone.utc) DAG3_ID = "test_dag3" TASK_ID = "op1" @@ -99,7 +98,6 @@ def setup(dag_maker, session=None) -> None: with dag_maker( DAG2_ID, - dag_display_name=DAG2_DISPLAY_NAME, schedule=None, start_date=DAG2_START_DATE, doc_md="details", @@ -150,7 +148,7 @@ def setup(dag_maker, session=None) -> None: ), # Search ({"dag_id_pattern": "1"}, 1, [DAG1_ID]), - ({"dag_display_name_pattern": "display2"}, 1, [DAG2_ID]), + ({"dag_display_name_pattern": "test_dag2"}, 1, [DAG2_ID]), ], ) def test_get_dags(test_client, query_params, expected_total_entries, expected_ids): @@ -239,7 +237,7 @@ def test_patch_dags(test_client, query_params, body, expected_status_code, expec "query_params, dag_id, expected_status_code, dag_display_name, start_date", [ ({}, "fake_dag_id", 404, "fake_dag", datetime(2023, 12, 31, tzinfo=timezone.utc)), - ({}, DAG2_ID, 200, DAG2_DISPLAY_NAME, DAG2_START_DATE), + ({}, DAG2_ID, 200, DAG2_ID, DAG2_START_DATE), ], ) def test_dag_details(test_client, query_params, dag_id, expected_status_code, dag_display_name, start_date): @@ -309,7 +307,7 @@ def test_dag_details(test_client, query_params, dag_id, expected_status_code, da "query_params, dag_id, expected_status_code, dag_display_name", [ ({}, "fake_dag_id", 404, "fake_dag"), - ({}, DAG2_ID, 200, DAG2_DISPLAY_NAME), + ({}, DAG2_ID, 200, DAG2_ID), ], ) def test_get_dag(test_client, query_params, dag_id, expected_status_code, dag_display_name): From 5e78aaa28e1b8a8d901458ec4cc18d044483db51 Mon 
Sep 17 00:00:00 2001 From: GPK Date: Fri, 11 Oct 2024 09:19:37 +0100 Subject: [PATCH 073/125] Check _is_canary_run/pr condition in is_legacy_ui_api_labeled method (#42921) * check _is_canary_run condition in is_legacy_ui_api_labeled method * include pr check in is_legacy_ui_api_labeled --- dev/breeze/src/airflow_breeze/utils/selective_checks.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/dev/breeze/src/airflow_breeze/utils/selective_checks.py b/dev/breeze/src/airflow_breeze/utils/selective_checks.py index dfc24993f274a..18458af5f8a23 100644 --- a/dev/breeze/src/airflow_breeze/utils/selective_checks.py +++ b/dev/breeze/src/airflow_breeze/utils/selective_checks.py @@ -1370,6 +1370,12 @@ def _is_canary_run(self): def is_legacy_ui_api_labeled(self) -> bool: # Selective check for legacy UI/API updates. # It is to ping the maintainer to add the label and make them aware of the changes. + if self._is_canary_run() or self._github_event not in ( + GithubEvents.PULL_REQUEST, + GithubEvents.PULL_REQUEST_TARGET, + ): + return False + if ( self._matching_files( FileGroupForCi.LEGACY_API_FILES, CI_FILE_GROUP_MATCHES, CI_FILE_GROUP_EXCLUDES From 57aeb34eccf8b0ca1cbb50a711ab548b7dc09494 Mon Sep 17 00:00:00 2001 From: Ash Berlin-Taylor Date: Fri, 11 Oct 2024 11:11:27 +0100 Subject: [PATCH 074/125] Allow python 3.12 for the breeze release commands (#42936) I'm not 100% sure why we added this check in the first place, but it doesn't seem to be needed anymore (I've tested things locally with this removed and it all seems to behave itself) --- dev/breeze/src/airflow_breeze/utils/python_versions.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/dev/breeze/src/airflow_breeze/utils/python_versions.py b/dev/breeze/src/airflow_breeze/utils/python_versions.py index 3571bebb245dc..c7d514f5ff967 100644 --- a/dev/breeze/src/airflow_breeze/utils/python_versions.py +++ b/dev/breeze/src/airflow_breeze/utils/python_versions.py @@ -50,9 +50,6 @@ def check_python_version(): if not sys.version_info >= (3, 9): get_console().print("[error]At least Python 3.9 is required to prepare reproducible archives.\n") error = True - elif not sys.version_info < (3, 12): - get_console().print("[error]Python 3.12 is not supported.\n") - error = True if error: get_console().print( "[warning]Please reinstall Breeze using Python 3.9 - 3.11 environment.[/]\n\n" From a81066e6af29406aacc81b0a371daf639affd88b Mon Sep 17 00:00:00 2001 From: Brent Bovenzi Date: Fri, 11 Oct 2024 13:53:26 +0200 Subject: [PATCH 075/125] Render errors when getting a list of dags (#42897) * Render errors when getting a list of dags * Restore axios, prettierignore pnpm-store * Add pnpm-store to prettier ignore --- airflow/ui/.prettierignore | 1 + airflow/ui/pnpm-lock.yaml | 8 +-- .../ui/src/components/DataTable/DataTable.tsx | 7 +- airflow/ui/src/components/ErrorAlert.tsx | 67 +++++++++++++++++++ airflow/ui/src/main.tsx | 6 +- airflow/ui/src/pages/DagsList/DagsList.tsx | 4 +- 6 files changed, 83 insertions(+), 10 deletions(-) create mode 100644 airflow/ui/src/components/ErrorAlert.tsx diff --git a/airflow/ui/.prettierignore b/airflow/ui/.prettierignore index 7e860ea047193..49a8631b874a0 100644 --- a/airflow/ui/.prettierignore +++ b/airflow/ui/.prettierignore @@ -4,3 +4,4 @@ dist/ *.md *.yaml coverage/* +.pnpm-store diff --git a/airflow/ui/pnpm-lock.yaml b/airflow/ui/pnpm-lock.yaml index 6298d2ba3256f..c9f94d35f7358 100644 --- a/airflow/ui/pnpm-lock.yaml +++ b/airflow/ui/pnpm-lock.yaml @@ -2049,8 +2049,8 @@ packages: resolution: {integrity: 
sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==} engines: {node: '>=14'} - form-data@4.0.0: - resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} + form-data@4.0.1: + resolution: {integrity: sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==} engines: {node: '>= 6'} framer-motion@11.3.29: @@ -5012,7 +5012,7 @@ snapshots: axios@1.7.7: dependencies: follow-redirects: 1.15.9 - form-data: 4.0.0 + form-data: 4.0.1 proxy-from-env: 1.1.0 transitivePeerDependencies: - debug @@ -5697,7 +5697,7 @@ snapshots: cross-spawn: 7.0.3 signal-exit: 4.1.0 - form-data@4.0.0: + form-data@4.0.1: dependencies: asynckit: 0.4.0 combined-stream: 1.0.8 diff --git a/airflow/ui/src/components/DataTable/DataTable.tsx b/airflow/ui/src/components/DataTable/DataTable.tsx index 7f4c3cf083966..2ed1a4f16ea3a 100644 --- a/airflow/ui/src/components/DataTable/DataTable.tsx +++ b/airflow/ui/src/components/DataTable/DataTable.tsx @@ -41,6 +41,7 @@ type DataTableProps = { readonly columns: Array>; readonly data: Array; readonly displayMode?: "card" | "table"; + readonly errorMessage?: ReactNode | string; readonly getRowCanExpand?: (row: Row) => boolean; readonly initialState?: TableState; readonly isFetching?: boolean; @@ -62,6 +63,7 @@ export const DataTable = ({ columns, data, displayMode = "table", + errorMessage, getRowCanExpand = defaultGetRowCanExpand, initialState, isFetching, @@ -127,10 +129,9 @@ export const DataTable = ({ Boolean(isFetching) && !Boolean(isLoading) ? "visible" : "hidden" } /> + {errorMessage} {!Boolean(isLoading) && !rows.length && ( - - {noRowsMessage ?? `No ${modelName}s found.`} - + {noRowsMessage ?? `No ${modelName}s found.`} )} {display === "table" && } {display === "card" && cardDef !== undefined && ( diff --git a/airflow/ui/src/components/ErrorAlert.tsx b/airflow/ui/src/components/ErrorAlert.tsx new file mode 100644 index 0000000000000..3128a2cdec2c4 --- /dev/null +++ b/airflow/ui/src/components/ErrorAlert.tsx @@ -0,0 +1,67 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { Alert, AlertIcon } from "@chakra-ui/react"; +import type { ApiError } from "openapi-gen/requests/core/ApiError"; +import type { + HTTPExceptionResponse, + HTTPValidationError, +} from "openapi-gen/requests/types.gen"; + +type ExpandedApiError = { + body: HTTPExceptionResponse | HTTPValidationError; +} & ApiError; + +type Props = { + readonly error?: unknown; +}; + +export const ErrorAlert = ({ error: err }: Props) => { + const error = err as ExpandedApiError; + + if (!Boolean(error)) { + return undefined; + } + + const details = error.body.detail; + let detailMessage; + + if (details !== undefined) { + if (typeof details === "string") { + detailMessage = details; + } else if (Array.isArray(details)) { + detailMessage = details.map( + (detail) => ` + ${detail.loc.join(".")} ${detail.msg}`, + ); + } else { + detailMessage = Object.keys(details).map( + (key) => `${key}: ${details[key] as string}`, + ); + } + } + + return ( + + + {error.message} +
+ {detailMessage} +
+ ); +}; diff --git a/airflow/ui/src/main.tsx b/airflow/ui/src/main.tsx index daf4bcd024cd6..12434ca7bade2 100644 --- a/airflow/ui/src/main.tsx +++ b/airflow/ui/src/main.tsx @@ -18,7 +18,7 @@ */ import { ChakraProvider } from "@chakra-ui/react"; import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; -import axios, { type AxiosError, type AxiosResponse } from "axios"; +import axios, { type AxiosError } from "axios"; import { createRoot } from "react-dom/client"; import { BrowserRouter } from "react-router-dom"; @@ -45,7 +45,7 @@ const queryClient = new QueryClient({ // redirect to login page if the API responds with unauthorized or forbidden errors axios.interceptors.response.use( - (response: AxiosResponse) => response, + (response) => response, (error: AxiosError) => { if (error.response?.status === 403 || error.response?.status === 401) { const params = new URLSearchParams(); @@ -53,6 +53,8 @@ axios.interceptors.response.use( params.set("next", globalThis.location.href); globalThis.location.replace(`/login?${params.toString()}`); } + + return Promise.reject(error); }, ); diff --git a/airflow/ui/src/pages/DagsList/DagsList.tsx b/airflow/ui/src/pages/DagsList/DagsList.tsx index 178663baf7478..60d6ef9c4f437 100644 --- a/airflow/ui/src/pages/DagsList/DagsList.tsx +++ b/airflow/ui/src/pages/DagsList/DagsList.tsx @@ -34,6 +34,7 @@ import { DataTable } from "src/components/DataTable"; import { ToggleTableDisplay } from "src/components/DataTable/ToggleTableDisplay"; import type { CardDef } from "src/components/DataTable/types"; import { useTableURLState } from "src/components/DataTable/useTableUrlState"; +import { ErrorAlert } from "src/components/ErrorAlert"; import { SearchBar } from "src/components/SearchBar"; import { TogglePause } from "src/components/TogglePause"; import { pluralize } from "src/utils/pluralize"; @@ -113,7 +114,7 @@ export const DagsList = () => { const [sort] = sorting; const orderBy = sort ? `${sort.desc ? "-" : ""}${sort.id}` : undefined; - const { data, isFetching, isLoading } = useDagServiceGetDags({ + const { data, error, isFetching, isLoading } = useDagServiceGetDags({ lastDagRunState, limit: pagination.pageSize, offset: pagination.pageIndex * pagination.pageSize, @@ -169,6 +170,7 @@ export const DagsList = () => { columns={columns} data={data?.dags ?? []} displayMode={display} + errorMessage={} initialState={tableURLState} isFetching={isFetching} isLoading={isLoading} From 39f8e1d487ab8f1969aaec512fdffacca989813c Mon Sep 17 00:00:00 2001 From: Kaxil Naik Date: Fri, 11 Oct 2024 13:35:00 +0100 Subject: [PATCH 076/125] Remove the referrer from Webserver to Scarf (#42901) This will make sure we don't receive any information about the Webserver URL sending the info like the number of plugins and such. --- airflow/www/templates/airflow/dags.html | 2 +- docs/apache-airflow/faq.rst | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/airflow/www/templates/airflow/dags.html b/airflow/www/templates/airflow/dags.html index c629936df7c00..6da7090a14781 100644 --- a/airflow/www/templates/airflow/dags.html +++ b/airflow/www/templates/airflow/dags.html @@ -489,6 +489,6 @@

{{ page_title }}

}
  {% if scarf_url %}
-
+
  {% endif %}
{% endblock %}
diff --git a/docs/apache-airflow/faq.rst b/docs/apache-airflow/faq.rst
index 5d4aea6ddd562..0b2c76765e704 100644
--- a/docs/apache-airflow/faq.rst
+++ b/docs/apache-airflow/faq.rst
@@ -548,4 +548,3 @@ The telemetry data collected is limited to the following:
 - Number of DAGs
 - Number of Airflow plugins
 - Number of timetables, Flask blueprints, Flask AppBuilder views, and Flask Appbuilder menu items from Airflow plugins
-- and information from tracking pixels like URL / IP-address as documented in the `Scarf Privacy Policy `__.
From a052d9eebe3e7cd601c0718a4e48dbeff87deaaf Mon Sep 17 00:00:00 2001
From: vlieven
Date: Fri, 11 Oct 2024 17:12:35 +0200
Subject: [PATCH 077/125] Split providers out of the main "airflow/" tree into
 a UV workspace project (#42505) (#42624)

This is only a partial split so far. It moves all the code and tests, but
leaves the creation of `core/` to a separate PR as this is already large
enough.

In addition to the straight file rename the other changes I had to make
here are:

- Some mypy/typing fixes. Mypy can be fragile about what it picks up when,
  so maybe some of those changes were caused by that. But the typing
  changes aren't large.

- Improve typing in common.sql type stub

  Again, likely a mypy file oddity, but the types should be safe

- Removed the `check-providers-init-file-missing` check

  This isn't needed now that airflow/providers shouldn't exist at all in
  the main tree.

- Create a "dev.tests_common" package that contains helper files and
  common pytest fixtures

  Since the provider tests are no longer under tests/ they don't
  automatically share the fixtures from the parent `tests/conftest.py` so
  they needed extracted. Ditto for `tests.test_utils` -- they can't be
  easily imported in provider tests anymore, so they are moved to a more
  explicit shared location.

In future we should switch how the CI image is built to make better use of
UV caching than our own approach, as that would remove a lot of custom
code.

Co-authored-by: Ash Berlin-Taylor
Co-authored-by: Ryan Hatter <25823361+RNHTTR@users.noreply.github.com>
---
 .../executors/kubernetes_executor.py          |  1 +
 .../executors/kubernetes_executor_utils.py    | 10 --------
 .../executors/test_kubernetes_executor.py     | 23 ++++++++++++-------
 3 files changed, 16 insertions(+), 18 deletions(-)

diff --git a/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py b/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
index 7c6e0d8852e5f..4301a54f02922 100644
--- a/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
+++ b/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
@@ -482,6 +482,7 @@ def _change_state(
                 self.running.remove(key)
             except KeyError:
                 self.log.debug("TI key not in running, not adding to event_buffer: %s", key)
+                return
         # If we don't have a TI state, look it up from the db.
event_buffer expects the TI state if state is None: diff --git a/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py b/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py index 495c09ea23e7c..42d2a97dffed1 100644 --- a/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py +++ b/providers/src/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py @@ -273,16 +273,6 @@ def process_status( (pod_name, namespace, TaskInstanceState.FAILED, annotations, resource_version) ) elif status == "Succeeded": - # We get multiple events once the pod hits a terminal state, and we only want to - # send it along to the scheduler once. - # If our event type is DELETED, or the pod has a deletion timestamp, we've already - # seen the initial Succeeded event and sent it along to the scheduler. - if event["type"] == "DELETED" or pod.metadata.deletion_timestamp: - self.log.info( - "Skipping event for Succeeded pod %s - event for this pod already sent to executor", - pod_name, - ) - return self.log.info("Event: %s Succeeded, annotations: %s", pod_name, annotations_string) self.watcher_queue.put((pod_name, namespace, None, annotations, resource_version)) elif status == "Running": diff --git a/providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py b/providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py index 5240bf0faecb2..07b42ee3fc22a 100644 --- a/providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py +++ b/providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py @@ -784,6 +784,21 @@ def test_change_state_adopted(self, mock_delete_pod, mock_get_kube_client, mock_ finally: executor.end() + @pytest.mark.db_test + @mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.KubernetesJobWatcher") + @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client") + def test_change_state_key_not_in_running(self, mock_get_kube_client, mock_kubernetes_job_watcher): + executor = self.kubernetes_executor + executor.start() + try: + key = ("dag_id", "task_id", "run_id", "try_number1") + executor.running = set() + executor._change_state(key, State.SUCCESS, "pod_name", "default") + assert executor.event_buffer.get(key) is None + assert executor.running == set() + finally: + executor.end() + @pytest.mark.db_test @pytest.mark.parametrize( "multi_namespace_mode_namespace_list, watchers_keys", @@ -1858,14 +1873,6 @@ def test_process_status_succeeded(self): # We don't know the TI state, so we send in None self.assert_watcher_queue_called_once_with_state(None) - def test_process_status_succeeded_dedup_timestamp(self): - self.pod.status.phase = "Succeeded" - self.pod.metadata.deletion_timestamp = timezone.utcnow() - self.events.append({"type": "MODIFIED", "object": self.pod}) - - self._run() - self.watcher.watcher_queue.put.assert_not_called() - @pytest.mark.parametrize( "ti_state", [ From 15df46590ea13c2e8b71324fad0b77da8eaa7167 Mon Sep 17 00:00:00 2001 From: GPK Date: Fri, 11 Oct 2024 20:03:42 +0100 Subject: [PATCH 078/125] increase backoff_factor and add try/catch in k8s tests (#42940) --- kubernetes_tests/test_base.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/kubernetes_tests/test_base.py b/kubernetes_tests/test_base.py index ac311daa52f8c..6929128173cbf 100644 --- a/kubernetes_tests/test_base.py +++ b/kubernetes_tests/test_base.py @@ -29,6 +29,7 @@ import requests import 
requests.exceptions from requests.adapters import HTTPAdapter +from urllib3.exceptions import MaxRetryError from urllib3.util.retry import Retry CLUSTER_FORWARDED_PORT = os.environ.get("CLUSTER_FORWARDED_PORT") or "8080" @@ -125,7 +126,7 @@ def _get_session_with_retries(self): session.auth = ("admin", "admin") retries = Retry( total=3, - backoff_factor=1, + backoff_factor=10, status_forcelist=[404], allowed_methods=Retry.DEFAULT_ALLOWED_METHODS | frozenset(["PATCH", "POST"]), ) @@ -225,10 +226,16 @@ def start_dag(self, dag_id, host): print(f"Calling [start_dag]#1 {patch_string}") max_attempts = 10 result = {} + # This loop retries until the DAG parser finishes with max_attempts and the DAG is available for execution. + # Keep the try/catch block, as the session object has a default retry configuration. + # If a MaxRetryError is raised, it can be safely ignored, indicating that the DAG is not yet parsed. while max_attempts: - result = self.session.patch(patch_string, json={"is_paused": False}) - if result.status_code == 200: - break + try: + result = self.session.patch(patch_string, json={"is_paused": False}) + if result.status_code == 200: + break + except MaxRetryError: + pass time.sleep(30) max_attempts -= 1 From 92a82eb9b8f33abd91e2eadd798b79433fcfb495 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Fri, 11 Oct 2024 21:22:05 +0200 Subject: [PATCH 079/125] Consistent python version checks and troubleshooting (#42944) Follow up after #42766 and #42936. * We do not have to check for minimum Python version for Python 3.9 any more (as we do not support 3.8 any more) * While Python 3.12 is not yet fully supported by Apache Beam, we should still not allow it for releasing providers, but all other commands should support Python 3.12 * When you had pre-commit installed with Python 3.8 before, various errors might appear when running pre-commit. Troubleshooting was added quoting the errors and explaining what to do. --- dev/breeze/doc/04_troubleshooting.rst | 32 +++++++++++++++++++ .../commands/release_management_commands.py | 2 +- .../airflow_breeze/utils/python_versions.py | 12 +++---- .../providers/apache/beam/provider.yaml | 5 ++- 4 files changed, 42 insertions(+), 9 deletions(-) diff --git a/dev/breeze/doc/04_troubleshooting.rst b/dev/breeze/doc/04_troubleshooting.rst index fd0b1dfa401dc..fd5f92b03ea35 100644 --- a/dev/breeze/doc/04_troubleshooting.rst +++ b/dev/breeze/doc/04_troubleshooting.rst @@ -72,6 +72,38 @@ describe your problem. stated in `This comment `_ and allows to run Breeze with no problems. +Cannot import name 'cache' or Python >=3.9 required +--------------------------------------------------- + +When you see this error: + +.. code-block:: + + ImportError: cannot import name 'cache' from 'functools' (/Users/jarek/Library/Application Support/hatch/pythons/3.8/python/lib/python3.8/functools.py) + +or + +.. code-block:: + + ERROR: Package 'blacken-docs' requires a different Python: 3.8.18 not in '>=3.9' + + +It means that your pre-commit hook is installed with (already End-Of-Life) Python 3.8 and you should reinstall +it and clean pre-commit cache. + +This can be done (if you use ``pipx`` to install ``pre-commit``): + +.. code-block:: bash + + pipx uninstall pre-commit + pipx install pre-commit --python $(which python3.9) --force + pre-commit clean + pre-commit install + +If you installed ``pre-commit`` differently, you should remove and reinstall +it (and clean cache) in the way you installed it. 
+ + Bad Interpreter Error --------------------- diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py index b0a1ca2877332..95ecf30144ab5 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py @@ -797,7 +797,7 @@ def prepare_provider_packages( skip_tag_check: bool, version_suffix_for_pypi: str, ): - check_python_version() + check_python_version(release_provider_packages=True) perform_environment_checks() fix_ownership_using_docker() cleanup_python_generated_files() diff --git a/dev/breeze/src/airflow_breeze/utils/python_versions.py b/dev/breeze/src/airflow_breeze/utils/python_versions.py index c7d514f5ff967..d144139b06816 100644 --- a/dev/breeze/src/airflow_breeze/utils/python_versions.py +++ b/dev/breeze/src/airflow_breeze/utils/python_versions.py @@ -45,14 +45,12 @@ def get_python_version_list(python_versions: str) -> list[str]: return python_version_list -def check_python_version(): - error = False - if not sys.version_info >= (3, 9): - get_console().print("[error]At least Python 3.9 is required to prepare reproducible archives.\n") - error = True - if error: +def check_python_version(release_provider_packages: bool = False): + if not sys.version_info < (3, 12) and release_provider_packages: + get_console().print("[error]Python 3.12 is not supported.\n") get_console().print( - "[warning]Please reinstall Breeze using Python 3.9 - 3.11 environment.[/]\n\n" + "[warning]Please reinstall Breeze using Python 3.9 - 3.11 environment because not all " + "provider packages support Python 3.12 yet.[/]\n\n" "For example:\n\n" "pipx uninstall apache-airflow-breeze\n" "pipx install --python $(which python3.9) -e ./dev/breeze --force\n" diff --git a/providers/src/airflow/providers/apache/beam/provider.yaml b/providers/src/airflow/providers/apache/beam/provider.yaml index ae0c4d37005b8..684882f75b83a 100644 --- a/providers/src/airflow/providers/apache/beam/provider.yaml +++ b/providers/src/airflow/providers/apache/beam/provider.yaml @@ -72,7 +72,10 @@ additional-extras: - apache-beam[gcp] # Apache Beam currently does not support Python 3.12 -# There is an issue tracking it https://github.com/apache/beam/issues/29149 +# There is an issue tracking it https://github.com/apache/beam/issues/29149. +# While the original issue above is closed, Apache Beam still does not support Python 3.12 +# because the dill version used by them break our PythonVirtualenvOperator when dill is enabled +# See https://github.com/apache/beam/issues/32617 excluded-python-versions: ['3.12'] integrations: From aab1e370c500e01cfbeb7ddbd15dd1ba24670667 Mon Sep 17 00:00:00 2001 From: Andre Miranda Date: Fri, 11 Oct 2024 15:39:29 -0400 Subject: [PATCH 080/125] Update README.rst (#42941) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit grammar update for clarity 😄 👍 --- docs/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/README.rst b/docs/README.rst index 99196721958db..2e40799741166 100644 --- a/docs/README.rst +++ b/docs/README.rst @@ -20,7 +20,7 @@ Documentation This directory contains documentation for the `Apache Airflow project `__ and the providers packages that are closely related to it. 
You can contribute to the Airflow Docs in the same way and for the same reasons as contributing code; Docs contributions that improve existing content, fix bugs, and create new content are welcomed and encouraged. -This README gives an overview about how Airflow uses `Sphinx `__ to write and build docs. It also includes instructions for how to make Docs changes locally or with the GitHub UI. +This README gives an overview about how Airflow uses `Sphinx `__ to write and build docs. It also includes instructions on how to make Docs changes locally or with the GitHub UI. Development documentation preview ================================== From b9b24f4ee897ea2cc2e27b2cc875b2da1096c9ee Mon Sep 17 00:00:00 2001 From: Daniel Standish <15932138+dstandish@users.noreply.github.com> Date: Fri, 11 Oct 2024 14:37:59 -0700 Subject: [PATCH 081/125] Add a "backfill create" command (#42922) I create a new command group "backfill" for management of backfills. The first action is "create" which creates a backfill. Some others may follow such as pause / cancel. --- airflow/cli/cli_config.py | 137 ++++------- airflow/cli/commands/backfill_command.py | 44 ++++ airflow/cli/commands/dag_command.py | 102 +------- airflow/executors/executor_loader.py | 4 +- airflow/models/dag.py | 3 - tests/cli/commands/test_backfill_command.py | 89 +++++++ tests/cli/commands/test_dag_command.py | 251 -------------------- tests/jobs/test_backfill_job.py | 20 -- 8 files changed, 176 insertions(+), 474 deletions(-) create mode 100644 airflow/cli/commands/backfill_command.py create mode 100644 tests/cli/commands/test_backfill_command.py diff --git a/airflow/cli/cli_config.py b/airflow/cli/cli_config.py index 4b42f2b96fb2c..e68a464f61bb5 100644 --- a/airflow/cli/cli_config.py +++ b/airflow/cli/cli_config.py @@ -264,7 +264,7 @@ def string_lower_type(val): help="The number of next execution datetimes to show", ) -# backfill +# misc ARG_MARK_SUCCESS = Arg( ("-m", "--mark-success"), help="Mark jobs as succeeded without running them", action="store_true" ) @@ -300,78 +300,34 @@ def string_lower_type(val): ) ARG_VERBOSE = Arg(("-v", "--verbose"), help="Make logging output more verbose", action="store_true") ARG_LOCAL = Arg(("-l", "--local"), help="Run the task using the LocalExecutor", action="store_true") -ARG_DONOT_PICKLE = Arg( - ("-x", "--donot-pickle"), - help=( - "Do not attempt to pickle the DAG object to send over " - "to the workers, just tell the workers to run their version " - "of the code" - ), - action="store_true", -) -ARG_BF_IGNORE_DEPENDENCIES = Arg( - ("-i", "--ignore-dependencies"), - help=( - "Skip upstream tasks, run only the tasks " - "matching the regexp. 
Only works in conjunction " - "with task_regex" - ), - action="store_true", -) ARG_POOL = Arg(("--pool",), "Resource pool to use") -ARG_DELAY_ON_LIMIT = Arg( - ("--delay-on-limit",), - help=( - "Amount of time in seconds to wait when the limit " - "on maximum active dag runs (max_active_runs) has " - "been reached before trying to execute a dag run " - "again" - ), - type=float, - default=1.0, -) -ARG_RESET_DAG_RUN = Arg( - ("--reset-dagruns",), - help=( - "if set, the backfill will delete existing " - "backfill-related DAG runs and start " - "anew with fresh, running DAG runs" - ), - action="store_true", -) -ARG_RERUN_FAILED_TASKS = Arg( - ("--rerun-failed-tasks",), - help=( - "if set, the backfill will auto-rerun " - "all the failed tasks for the backfill date range " - "instead of throwing exceptions" - ), - action="store_true", -) -ARG_CONTINUE_ON_FAILURES = Arg( - ("--continue-on-failures",), - help=("if set, the backfill will keep going even if some of the tasks failed"), - action="store_true", + + +# backfill +ARG_BACKFILL_DAG = Arg(flags=("--dag",), help="The dag to backfill.", required=True) +ARG_BACKFILL_FROM_DATE = Arg( + ("--from-date",), help="Earliest logical date to backfill.", type=parsedate, required=True ) -ARG_DISABLE_RETRY = Arg( - ("--disable-retry",), - help=("if set, the backfill will set tasks as failed without retrying."), - action="store_true", +ARG_BACKFILL_TO_DATE = Arg( + ("--to-date",), help="Latest logical date to backfill", type=parsedate, required=True ) +ARG_DAG_RUN_CONF = Arg(flags=("--dag-run-conf",), help="JSON dag run configuration.") ARG_RUN_BACKWARDS = Arg( - ( - "-B", - "--run-backwards", - ), + flags=("--run-backwards",), help=( - "if set, the backfill will run tasks from the most " - "recent day first. if there are tasks that depend_on_past " - "this option will throw an exception" + "If set, the backfill will run tasks from the most recent logical date first. " + "Not supported if there are tasks that depend_on_past." ), action="store_true", ) +ARG_MAX_ACTIVE_RUNS = Arg( + ("--max-active-runs",), + type=positive_int(allow_zero=False), + help="Max active runs for this backfill.", +) +# misc ARG_TREAT_DAG_ID_AS_REGEX = Arg( ("--treat-dag-id-as-regex",), help=("if set, dag_id will be treated as regex instead of an exact string"), @@ -1056,6 +1012,22 @@ class GroupCommand(NamedTuple): CLICommand = Union[ActionCommand, GroupCommand] +BACKFILL_COMMANDS = ( + ActionCommand( + name="create", + help="Create a backfill for a dag.", + description="Run subsections of a DAG for a specified date range.", + func=lazy_load_command("airflow.cli.commands.backfill_command.create_backfill"), + args=( + ARG_BACKFILL_DAG, + ARG_BACKFILL_FROM_DATE, + ARG_BACKFILL_TO_DATE, + ARG_DAG_RUN_CONF, + ARG_RUN_BACKWARDS, + ARG_MAX_ACTIVE_RUNS, + ), + ), +) DAGS_COMMANDS = ( ActionCommand( name="details", @@ -1227,40 +1199,6 @@ class GroupCommand(NamedTuple): ARG_VERBOSE, ), ), - ActionCommand( - name="backfill", - help="Run subsections of a DAG for a specified date range", - description=( - "Run subsections of a DAG for a specified date range. If reset_dag_run option is used, " - "backfill will first prompt users whether airflow should clear all the previous dag_run and " - "task_instances within the backfill date range. 
If rerun_failed_tasks is used, backfill " - "will auto re-run the previous failed task instances within the backfill date range" - ), - func=lazy_load_command("airflow.cli.commands.dag_command.dag_backfill"), - args=( - ARG_DAG_ID, - ARG_TASK_REGEX, - ARG_START_DATE, - ARG_END_DATE, - ARG_MARK_SUCCESS, - ARG_LOCAL, - ARG_DONOT_PICKLE, - ARG_YES, - ARG_CONTINUE_ON_FAILURES, - ARG_DISABLE_RETRY, - ARG_BF_IGNORE_DEPENDENCIES, - ARG_SUBDIR, - ARG_POOL, - ARG_DELAY_ON_LIMIT, - ARG_DRY_RUN, - ARG_VERBOSE, - ARG_CONF, - ARG_RESET_DAG_RUN, - ARG_RERUN_FAILED_TASKS, - ARG_RUN_BACKWARDS, - ARG_TREAT_DAG_ID_AS_REGEX, - ), - ), ActionCommand( name="test", help="Execute one single DagRun", @@ -1913,6 +1851,11 @@ class GroupCommand(NamedTuple): help="Manage DAGs", subcommands=DAGS_COMMANDS, ), + GroupCommand( + name="backfill", + help="Manage backfills", + subcommands=BACKFILL_COMMANDS, + ), GroupCommand( name="tasks", help="Manage tasks", diff --git a/airflow/cli/commands/backfill_command.py b/airflow/cli/commands/backfill_command.py new file mode 100644 index 0000000000000..8714ed5585004 --- /dev/null +++ b/airflow/cli/commands/backfill_command.py @@ -0,0 +1,44 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +from __future__ import annotations + +import logging +import signal + +from airflow import settings +from airflow.models.backfill import _create_backfill +from airflow.utils import cli as cli_utils +from airflow.utils.cli import sigint_handler +from airflow.utils.providers_configuration_loader import providers_configuration_loaded + + +@cli_utils.action_cli +@providers_configuration_loaded +def create_backfill(args) -> None: + """Create backfill job or dry run for a DAG or list of DAGs using regex.""" + logging.basicConfig(level=settings.LOGGING_LEVEL, format=settings.SIMPLE_LOG_FORMAT) + signal.signal(signal.SIGTERM, sigint_handler) + + _create_backfill( + dag_id=args.dag, + from_date=args.from_date, + to_date=args.to_date, + max_active_runs=args.max_active_runs, + reverse=args.run_backwards, + dag_run_conf=args.dag_run_conf, + ) diff --git a/airflow/cli/commands/dag_command.py b/airflow/cli/commands/dag_command.py index 0697709754765..83d0430a717bd 100644 --- a/airflow/cli/commands/dag_command.py +++ b/airflow/cli/commands/dag_command.py @@ -23,7 +23,6 @@ import json import logging import operator -import signal import subprocess import sys from typing import TYPE_CHECKING @@ -31,128 +30,31 @@ import re2 from sqlalchemy import delete, select -from airflow import settings from airflow.api.client import get_current_api_client from airflow.api_connexion.schemas.dag_schema import dag_schema from airflow.cli.simple_table import AirflowConsole -from airflow.configuration import conf from airflow.exceptions import AirflowException from airflow.jobs.job import Job from airflow.models import DagBag, DagModel, DagRun, TaskInstance -from airflow.models.dag import DAG from airflow.models.serialized_dag import SerializedDagModel from airflow.utils import cli as cli_utils, timezone -from airflow.utils.cli import get_dag, get_dags, process_subdir, sigint_handler, suppress_logs_and_warning +from airflow.utils.cli import get_dag, process_subdir, suppress_logs_and_warning from airflow.utils.dag_parsing_context import _airflow_parsing_context_manager from airflow.utils.dot_renderer import render_dag, render_dag_dependencies from airflow.utils.helpers import ask_yesno from airflow.utils.providers_configuration_loader import providers_configuration_loaded from airflow.utils.session import NEW_SESSION, create_session, provide_session from airflow.utils.state import DagRunState -from airflow.utils.types import DagRunTriggeredByType if TYPE_CHECKING: from graphviz.dot import Dot from sqlalchemy.orm import Session + from airflow.models.dag import DAG from airflow.timetables.base import DataInterval - log = logging.getLogger(__name__) -def _run_dag_backfill(dags: list[DAG], args) -> None: - # If only one date is passed, using same as start and end - args.end_date = args.end_date or args.start_date - args.start_date = args.start_date or args.end_date - - run_conf = None - if args.conf: - run_conf = json.loads(args.conf) - - for dag in dags: - if args.task_regex: - dag = dag.partial_subset( - task_ids_or_regex=args.task_regex, include_upstream=not args.ignore_dependencies - ) - if not dag.task_dict: - raise AirflowException( - f"There are no tasks that match '{args.task_regex}' regex. Nothing to run, exiting..." 
- ) - - if args.dry_run: - print(f"Dry run of DAG {dag.dag_id} on {args.start_date}") - dagrun_infos = dag.iter_dagrun_infos_between(earliest=args.start_date, latest=args.end_date) - for dagrun_info in dagrun_infos: - dr = DagRun( - dag.dag_id, - execution_date=dagrun_info.logical_date, - data_interval=dagrun_info.data_interval, - triggered_by=DagRunTriggeredByType.CLI, - ) - - for task in dag.tasks: - print(f"Task {task.task_id} located in DAG {dag.dag_id}") - ti = TaskInstance(task, run_id=None) - ti.dag_run = dr - ti.dry_run() - else: - if args.reset_dagruns: - DAG.clear_dags( - [dag], - start_date=args.start_date, - end_date=args.end_date, - confirm_prompt=not args.yes, - dag_run_state=DagRunState.QUEUED, - ) - - try: - dag.run( - start_date=args.start_date, - end_date=args.end_date, - mark_success=args.mark_success, - local=args.local, - donot_pickle=(args.donot_pickle or conf.getboolean("core", "donot_pickle")), - ignore_first_depends_on_past=args.ignore_first_depends_on_past, - ignore_task_deps=args.ignore_dependencies, - pool=args.pool, - delay_on_limit_secs=args.delay_on_limit, - verbose=args.verbose, - conf=run_conf, - rerun_failed_tasks=args.rerun_failed_tasks, - run_backwards=args.run_backwards, - continue_on_failures=args.continue_on_failures, - disable_retry=args.disable_retry, - ) - except ValueError as vr: - print(str(vr)) - sys.exit(1) - - -@cli_utils.action_cli -@providers_configuration_loaded -def dag_backfill(args, dag: list[DAG] | DAG | None = None) -> None: - """Create backfill job or dry run for a DAG or list of DAGs using regex.""" - logging.basicConfig(level=settings.LOGGING_LEVEL, format=settings.SIMPLE_LOG_FORMAT) - signal.signal(signal.SIGTERM, sigint_handler) - args.ignore_first_depends_on_past = True - - if not args.start_date and not args.end_date: - raise AirflowException("Provide a start_date and/or end_date") - - if not dag: - dags = get_dags(args.subdir, dag_id=args.dag_id, use_regex=args.treat_dag_id_as_regex) - elif isinstance(dag, list): - dags = dag - else: - dags = [dag] - del dag - - dags.sort(key=lambda d: d.dag_id) - _run_dag_backfill(dags, args) - if len(dags) > 1: - log.info("All of the backfills are done.") - - @cli_utils.action_cli @providers_configuration_loaded def dag_trigger(args) -> None: diff --git a/airflow/executors/executor_loader.py b/airflow/executors/executor_loader.py index ec79860918b80..4a940793df27f 100644 --- a/airflow/executors/executor_loader.py +++ b/airflow/executors/executor_loader.py @@ -171,10 +171,8 @@ def set_default_executor(cls, executor: BaseExecutor) -> None: """ Externally set an executor to be the default. - This is used in rare cases such as dag.run which allows, as a user convenience, to provide + This is used in rare cases such as dag.test which allows, as a user convenience, to provide the executor by cli/argument instead of Airflow configuration - - todo: given comments above, is this needed anymore since DAG.run is removed? 
""" exec_class_name = executor.__class__.__qualname__ exec_name = ExecutorName(f"{executor.__module__}.{exec_class_name}") diff --git a/airflow/models/dag.py b/airflow/models/dag.py index 8d85152677e0b..f5def92ea92a7 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -2293,9 +2293,6 @@ def _remove_task(self, task_id: str) -> None: self.task_count = len(self.task_dict) - def run(self, *args, **kwargs): - """Leaving this here to be removed in other PR for simpler review.""" - def cli(self): """Exposes a CLI specific to this DAG.""" check_cycle(self) diff --git a/tests/cli/commands/test_backfill_command.py b/tests/cli/commands/test_backfill_command.py new file mode 100644 index 0000000000000..c01e1e4f9d07b --- /dev/null +++ b/tests/cli/commands/test_backfill_command.py @@ -0,0 +1,89 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import argparse +from datetime import datetime +from unittest import mock + +import pendulum +import pytest + +import airflow.cli.commands.backfill_command +from airflow.cli import cli_parser +from airflow.models import DagBag +from airflow.utils import timezone + +from dev.tests_common.test_utils.db import clear_db_backfills, clear_db_dags, clear_db_runs + +DEFAULT_DATE = timezone.make_aware(datetime(2015, 1, 1), timezone=timezone.utc) +if pendulum.__version__.startswith("3"): + DEFAULT_DATE_REPR = DEFAULT_DATE.isoformat(sep=" ") +else: + DEFAULT_DATE_REPR = DEFAULT_DATE.isoformat() + +# TODO: Check if tests needs side effects - locally there's missing DAG + +pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] + + +class TestCliBackfill: + parser: argparse.ArgumentParser + + @classmethod + def setup_class(cls): + cls.dagbag = DagBag(include_examples=True) + cls.dagbag.sync_to_db() + cls.parser = cli_parser.get_parser() + + @classmethod + def teardown_class(cls) -> None: + clear_db_runs() + clear_db_dags() + clear_db_backfills() + + def setup_method(self): + clear_db_runs() # clean-up all dag run before start each test + clear_db_dags() + clear_db_backfills() + + @mock.patch("airflow.cli.commands.backfill_command._create_backfill") + def test_backfill(self, mock_create): + airflow.cli.commands.backfill_command.create_backfill( + self.parser.parse_args( + [ + "backfill", + "create", + "--dag", + "example_bash_operator", + "--from-date", + DEFAULT_DATE.isoformat(), + "--to-date", + DEFAULT_DATE.isoformat(), + ] + ) + ) + + mock_create.assert_called_once_with( + dag_id="example_bash_operator", + from_date=DEFAULT_DATE, + to_date=DEFAULT_DATE, + max_active_runs=None, + reverse=False, + dag_run_conf=None, + ) diff --git a/tests/cli/commands/test_dag_command.py b/tests/cli/commands/test_dag_command.py index f0c9a18c1a567..e9d10a2e33c18 100644 --- 
a/tests/cli/commands/test_dag_command.py +++ b/tests/cli/commands/test_dag_command.py @@ -124,139 +124,6 @@ def test_reserialize_should_support_subdir_argument(self): serialized_dags_after_reserialize = session.query(SerializedDagModel).all() assert len(serialized_dags_after_reserialize) == 1 # Serialized DAG back - @mock.patch("airflow.cli.commands.dag_command.DAG.run") - def test_backfill(self, mock_run): - dag_command.dag_backfill( - self.parser.parse_args( - ["dags", "backfill", "example_bash_operator", "--start-date", DEFAULT_DATE.isoformat()] - ) - ) - - mock_run.assert_called_once_with( - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE, - conf=None, - delay_on_limit_secs=1.0, - donot_pickle=False, - ignore_first_depends_on_past=True, - ignore_task_deps=False, - local=False, - mark_success=False, - pool=None, - rerun_failed_tasks=False, - run_backwards=False, - verbose=False, - continue_on_failures=False, - disable_retry=False, - ) - mock_run.reset_mock() - dag = self.dagbag.get_dag("example_bash_operator") - - with contextlib.redirect_stdout(StringIO()) as stdout: - dag_command.dag_backfill( - self.parser.parse_args( - [ - "dags", - "backfill", - "example_bash_operator", - "--task-regex", - "runme_0", - "--dry-run", - "--start-date", - DEFAULT_DATE.isoformat(), - ] - ), - dag=dag, - ) - - output = stdout.getvalue() - assert f"Dry run of DAG example_bash_operator on {DEFAULT_DATE_REPR}\n" in output - assert "Task runme_0 located in DAG example_bash_operator\n" in output - - mock_run.assert_not_called() # Dry run shouldn't run the backfill - - dag_command.dag_backfill( - self.parser.parse_args( - [ - "dags", - "backfill", - "example_bash_operator", - "--dry-run", - "--start-date", - DEFAULT_DATE.isoformat(), - ] - ), - dag=dag, - ) - - mock_run.assert_not_called() # Dry run shouldn't run the backfill - - dag_command.dag_backfill( - self.parser.parse_args( - [ - "dags", - "backfill", - "example_bash_operator", - "--local", - "--start-date", - DEFAULT_DATE.isoformat(), - ] - ), - dag=dag, - ) - - mock_run.assert_called_once_with( - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE, - conf=None, - delay_on_limit_secs=1.0, - donot_pickle=False, - ignore_first_depends_on_past=True, - ignore_task_deps=False, - local=True, - mark_success=False, - pool=None, - rerun_failed_tasks=False, - run_backwards=False, - verbose=False, - continue_on_failures=False, - disable_retry=False, - ) - mock_run.reset_mock() - - with contextlib.redirect_stdout(StringIO()) as stdout: - dag_command.dag_backfill( - self.parser.parse_args( - [ - "dags", - "backfill", - "example_branch_(python_){0,1}operator(_decorator){0,1}", - "--task-regex", - "run_this_first", - "--dry-run", - "--treat-dag-id-as-regex", - "--start-date", - DEFAULT_DATE.isoformat(), - ] - ), - ) - - output = stdout.getvalue() - - assert f"Dry run of DAG example_branch_python_operator_decorator on {DEFAULT_DATE_REPR}\n" in output - assert "Task run_this_first located in DAG example_branch_python_operator_decorator\n" in output - assert f"Dry run of DAG example_branch_operator on {DEFAULT_DATE_REPR}\n" in output - assert "Task run_this_first located in DAG example_branch_operator\n" in output - - @mock.patch("airflow.cli.commands.dag_command.get_dag") - def test_backfill_fails_without_loading_dags(self, mock_get_dag): - cli_args = self.parser.parse_args(["dags", "backfill", "example_bash_operator"]) - - with pytest.raises(AirflowException): - dag_command.dag_backfill(cli_args) - - mock_get_dag.assert_not_called() - def 
test_show_dag_dependencies_print(self): with contextlib.redirect_stdout(StringIO()) as temp_stdout: dag_command.dag_dependencies_show(self.parser.parse_args(["dags", "show-dependencies"])) @@ -314,124 +181,6 @@ def test_show_dag_imgcat(self, mock_render_dag, mock_popen): assert "OUT" in out assert "ERR" in out - @mock.patch("airflow.cli.commands.dag_command.DAG.run") - def test_cli_backfill_ignore_first_depends_on_past(self, mock_run): - """ - Test that CLI respects -I argument - - We just check we call dag.run() right. The behaviour of that kwarg is - tested in test_jobs - """ - dag_id = "example_bash_operator" - run_date = DEFAULT_DATE + timedelta(days=1) - args = [ - "dags", - "backfill", - dag_id, - "--local", - "--start-date", - run_date.isoformat(), - ] - dag = self.dagbag.get_dag(dag_id) - - dag_command.dag_backfill(self.parser.parse_args(args), dag=dag) - - mock_run.assert_called_once_with( - start_date=run_date, - end_date=run_date, - conf=None, - delay_on_limit_secs=1.0, - donot_pickle=False, - ignore_first_depends_on_past=True, - ignore_task_deps=False, - local=True, - mark_success=False, - pool=None, - rerun_failed_tasks=False, - run_backwards=False, - verbose=False, - continue_on_failures=False, - disable_retry=False, - ) - - @pytest.mark.parametrize( - "cli_arg", - [ - pytest.param("-B", id="short"), - pytest.param("--run-backwards", id="full"), - ], - ) - @mock.patch("airflow.cli.commands.dag_command.DAG.run") - def test_cli_backfill_depends_on_past_run_backwards(self, mock_run, cli_arg: str): - """Test that CLI respects -B argument.""" - dag_id = "test_depends_on_past" - start_date = DEFAULT_DATE + timedelta(days=1) - end_date = start_date + timedelta(days=1) - args = [ - "dags", - "backfill", - dag_id, - "--local", - "--start-date", - start_date.isoformat(), - "--end-date", - end_date.isoformat(), - cli_arg, - ] - dag = self.dagbag.get_dag(dag_id) - - dag_command.dag_backfill(self.parser.parse_args(args), dag=dag) - mock_run.assert_called_once_with( - start_date=start_date, - end_date=end_date, - conf=None, - delay_on_limit_secs=1.0, - donot_pickle=False, - ignore_first_depends_on_past=True, - ignore_task_deps=False, - local=True, - mark_success=False, - pool=None, - rerun_failed_tasks=False, - run_backwards=True, - verbose=False, - continue_on_failures=False, - disable_retry=False, - ) - - @mock.patch("airflow.models.taskinstance.TaskInstance.dry_run") - @mock.patch("airflow.cli.commands.dag_command.DagRun") - def test_backfill_with_custom_timetable(self, mock_dagrun, mock_dry_run): - """ - when calling `dags backfill` on dag with custom timetable, the DagRun object should be created with - data_intervals. 
- """ - - start_date = DEFAULT_DATE + timedelta(days=1) - end_date = start_date + timedelta(days=1) - workdays = [ - start_date, - start_date + timedelta(days=1), - start_date + timedelta(days=2), - ] - cli_args = self.parser.parse_args( - [ - "dags", - "backfill", - "example_workday_timetable", - "--start-date", - start_date.isoformat(), - "--end-date", - end_date.isoformat(), - "--dry-run", - ] - ) - from airflow.example_dags.plugins.workday import AfterWorkdayTimetable - - with mock.patch.object(AfterWorkdayTimetable, "get_next_workday", side_effect=workdays): - dag_command.dag_backfill(cli_args) - assert "data_interval" in mock_dagrun.call_args.kwargs - def test_next_execution(self, tmp_path): dag_test_list = [ ("future_schedule_daily", "timedelta(days=5)", "'0 0 * * *'", "True"), diff --git a/tests/jobs/test_backfill_job.py b/tests/jobs/test_backfill_job.py index dead9be86230b..616f328ee4177 100644 --- a/tests/jobs/test_backfill_job.py +++ b/tests/jobs/test_backfill_job.py @@ -29,7 +29,6 @@ import pytest from airflow import settings -from airflow.cli import cli_parser from airflow.exceptions import ( AirflowException, BackfillUnfinished, @@ -186,7 +185,6 @@ def clean_db(): @pytest.fixture(autouse=True) def set_instance_attrs(self, dag_bag): self.clean_db() - self.parser = cli_parser.get_parser() self.dagbag = dag_bag # `airflow tasks run` relies on serialized_dag for dag in self.dagbag.dags.values(): @@ -1126,24 +1124,6 @@ def test_backfill_depends_on_past_backwards(self, mock_executor): with pytest.raises(AirflowException, match=expected_msg): run_job(job=job, execute_callable=job_runner._execute) - def test_cli_receives_delay_arg(self): - """ - Tests that the --delay argument is passed correctly to the BackfillJob - """ - dag_id = "example_bash_operator" - run_date = DEFAULT_DATE - args = [ - "dags", - "backfill", - dag_id, - "-s", - run_date.isoformat(), - "--delay-on-limit", - "0.5", - ] - parsed_args = self.parser.parse_args(args) - assert 0.5 == parsed_args.delay_on_limit - def _get_dag_test_max_active_limits( self, dag_maker_fixture, dag_id="test_dag", max_active_runs=1, **kwargs ): From db9c130a5c987a12ee0f1dba624275c5844c226c Mon Sep 17 00:00:00 2001 From: Jianzhun Du <68252326+sfc-gh-jdu@users.noreply.github.com> Date: Fri, 11 Oct 2024 17:41:32 -0700 Subject: [PATCH 082/125] Fix SNOWFLAKE_CONN_ID and DAG_ID in Snowpark system tests (#42952) --- .../tests/system/snowflake/example_snowpark_decorator.py | 4 ++-- providers/tests/system/snowflake/example_snowpark_operator.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/providers/tests/system/snowflake/example_snowpark_decorator.py b/providers/tests/system/snowflake/example_snowpark_decorator.py index 3f74b97d0f23e..4c2319a91b21e 100644 --- a/providers/tests/system/snowflake/example_snowpark_decorator.py +++ b/providers/tests/system/snowflake/example_snowpark_decorator.py @@ -30,8 +30,8 @@ from airflow import DAG from airflow.decorators import task -SNOWFLAKE_CONN_ID = "snowflake_default" -DAG_ID = "example_snowpark" +SNOWFLAKE_CONN_ID = "my_snowflake_conn" +DAG_ID = "example_snowpark_decorator" with DAG( DAG_ID, diff --git a/providers/tests/system/snowflake/example_snowpark_operator.py b/providers/tests/system/snowflake/example_snowpark_operator.py index 0543596ec9b1f..1d7cce34f7aa1 100644 --- a/providers/tests/system/snowflake/example_snowpark_operator.py +++ b/providers/tests/system/snowflake/example_snowpark_operator.py @@ -30,8 +30,8 @@ from airflow import DAG from 
airflow.providers.snowflake.operators.snowpark import SnowparkOperator -SNOWFLAKE_CONN_ID = "snowflake_default" -DAG_ID = "example_snowpark" +SNOWFLAKE_CONN_ID = "my_snowflake_conn" +DAG_ID = "example_snowpark_operator" with DAG( DAG_ID, From 15fc509bbbdeeca95759665e694e3075b3906ea2 Mon Sep 17 00:00:00 2001 From: Kaxil Naik Date: Sat, 12 Oct 2024 06:21:06 +0100 Subject: [PATCH 083/125] Fix broken links in Release Management docs (#42958) These links were pointing to the wrong location. This PR fixes it --- .../doc/09_release_management_tasks.rst | 54 +++++++++---------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/dev/breeze/doc/09_release_management_tasks.rst b/dev/breeze/doc/09_release_management_tasks.rst index 930f61159d16e..9feb9a5b195e6 100644 --- a/dev/breeze/doc/09_release_management_tasks.rst +++ b/dev/breeze/doc/09_release_management_tasks.rst @@ -26,7 +26,7 @@ do not need or have no access to run). Those are usually connected with releasin Those are all of the available release management commands: .. image:: ./images/output_release-management.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management.svg :width: 100% :alt: Breeze release management @@ -55,7 +55,7 @@ default is to build ``both`` type of packages ``sdist`` and ``wheel``. breeze release-management prepare-airflow-package --package-format=wheel .. image:: ./images/output_release-management_prepare-airflow-package.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_prepare-airflow-package.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_prepare-airflow-package.svg :width: 100% :alt: Breeze release-management prepare-airflow-package @@ -79,7 +79,7 @@ tarball for. breeze release-management prepare-airflow-tarball --version 2.8.0rc1 .. image:: ./images/output_release-management_prepare-airflow-tarball.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_prepare-airflow-tarball.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_prepare-airflow-tarball.svg :width: 100% :alt: Breeze release-management prepare-airflow-tarball @@ -94,7 +94,7 @@ automates it. breeze release-management create-minor-branch .. image:: ./images/output_release-management_create-minor-branch.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_create-minor-branch.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_create-minor-branch.svg :width: 100% :alt: Breeze release-management create-minor-branch @@ -109,7 +109,7 @@ When we prepare release candidate, we automate some of the steps we need to do. breeze release-management start-rc-process .. 
image:: ./images/output_release-management_start-rc-process.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_start-rc-process.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_start-rc-process.svg :width: 100% :alt: Breeze release-management start-rc-process @@ -123,7 +123,7 @@ When we prepare final release, we automate some of the steps we need to do. breeze release-management start-release .. image:: ./images/output_release-management_start-release.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_start-rc-process.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_start-rc-process.svg :width: 100% :alt: Breeze release-management start-rc-process @@ -154,7 +154,7 @@ You can also generate python client with custom security schemes. These are all of the available flags for the command: .. image:: ./images/output_release-management_prepare-python-client.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_prepare-python-client.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_prepare-python-client.svg :width: 100% :alt: Breeze release management prepare Python client @@ -185,7 +185,7 @@ step can be skipped if you pass the ``--skip-latest`` flag. These are all of the available flags for the ``release-prod-images`` command: .. image:: ./images/output_release-management_release-prod-images.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_release-prod-images.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_release-prod-images.svg :width: 100% :alt: Breeze release management release prod images @@ -208,7 +208,7 @@ However, If you want to disable this behaviour, set the envvar CLEAN_LOCAL_TAGS These are all of the available flags for the ``tag-providers`` command: .. image:: ./images/output_release-management_tag-providers.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_tag-providers.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_tag-providers.svg :width: 100% :alt: Breeze release management tag-providers @@ -234,7 +234,7 @@ which version of Helm Chart you are preparing the tarball for. breeze release-management prepare-helm-chart-tarball --version 1.12.0 --version-suffix rc1 .. image:: ./images/output_release-management_prepare-helm-chart-tarball.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_prepare-helm-chart-tarball.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_prepare-helm-chart-tarball.svg :width: 100% :alt: Breeze release-management prepare-helm-chart-tarball @@ -256,7 +256,7 @@ This prepares helm chart .tar.gz package in the dist folder. breeze release-management prepare-helm-chart-package --sign myemail@apache.org .. 
image:: ./images/output_release-management_prepare-helm-chart-package.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_prepare-helm-chart-package.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_prepare-helm-chart-package.svg :width: 100% :alt: Breeze release-management prepare-helm-chart-package @@ -292,7 +292,7 @@ The below example perform documentation preparation for provider packages. You can also add ``--answer yes`` to perform non-interactive build. .. image:: ./images/output_release-management_prepare-provider-documentation.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_prepare-provider-documentation.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_prepare-provider-documentation.svg :width: 100% :alt: Breeze prepare-provider-documentation @@ -325,7 +325,7 @@ You can see all providers available by running this command: breeze release-management prepare-provider-packages --help .. image:: ./images/output_release-management_prepare-provider-packages.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_prepare-provider-packages.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_prepare-provider-packages.svg :width: 100% :alt: Breeze prepare-provider-packages @@ -349,7 +349,7 @@ You can also run the verification with an earlier airflow version to check for c All the command parameters are here: .. image:: ./images/output_release-management_install-provider-packages.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_install-provider-packages.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_install-provider-packages.svg :width: 100% :alt: Breeze install-provider-packages @@ -373,7 +373,7 @@ You can also run the verification with an earlier airflow version to check for c All the command parameters are here: .. image:: ./images/output_release-management_verify-provider-packages.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_verify-provider-packages.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_verify-provider-packages.svg :width: 100% :alt: Breeze verify-provider-packages @@ -387,7 +387,7 @@ provider has been released) and date of the release of the provider version. These are all of the available flags for the ``generate-providers-metadata`` command: .. image:: ./images/output_release-management_generate-providers-metadata.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_generate-providers-metadata.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_generate-providers-metadata.svg :width: 100% :alt: Breeze release management generate providers metadata @@ -398,7 +398,7 @@ Generating Provider Issue You can use Breeze to generate a provider issue when you release new providers. .. 
image:: ./images/output_release-management_generate-issue-content-providers.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_generate-issue-content-providers.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_generate-issue-content-providers.svg :width: 100% :alt: Breeze generate-issue-content-providers @@ -414,7 +414,7 @@ command. These are all available flags of ``clean-old-provider-artifacts`` command: .. image:: ./images/output_release-management_clean-old-provider-artifacts.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_clean-old-provider-artifacts.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_clean-old-provider-artifacts.svg :width: 100% :alt: Breeze Clean Old Provider Artifacts @@ -462,7 +462,7 @@ Constraints are generated separately for each python version and there are separ These are all available flags of ``generate-constraints`` command: .. image:: ./images/output_release-management_generate-constraints.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_generate-constraints.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_generate-constraints.svg :width: 100% :alt: Breeze generate-constraints @@ -485,7 +485,7 @@ tagged already in the past. This can be done using ``breeze release-management u These are all available flags of ``update-constraints`` command: .. image:: ./images/output_release-management_update-constraints.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_update-constraints.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_update-constraints.svg :width: 100% :alt: Breeze update-constraints @@ -552,7 +552,7 @@ publishing docs for multiple providers. These are all available flags of ``release-management publish-docs`` command: .. image:: ./images/output_release-management_publish-docs.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_publish-docs.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_publish-docs.svg :width: 100% :alt: Breeze Publish documentation @@ -596,7 +596,7 @@ providers - you can mix apache-airflow, helm-chart and provider packages this wa These are all available flags of ``release-management add-back-references`` command: .. image:: ./images/output_release-management_add-back-references.svg - :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_release-management_add-back-references.svg + :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_add-back-references.svg :width: 100% :alt: Breeze Add Back References @@ -606,7 +606,7 @@ SBOM generation tasks Maintainers also can use Breeze for SBOM generation: .. 
image:: ./images/output_sbom.svg
-    :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_sbom.svg
+    :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_sbom.svg
     :width: 100%
     :alt: Breeze sbom
 
@@ -619,7 +619,7 @@ done by the ``generate-providers-requirements`` command. This command generates
 selected provider and python version, using the airflow version specified.
 
 .. image:: ./images/output_sbom_generate-providers-requirements.svg
-    :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_sbom_generate-providers-requirements.svg
+    :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_sbom_generate-providers-requirements.svg
     :width: 100%
     :alt: Breeze generate SBOM provider requirements
 
@@ -634,7 +634,7 @@ information is written directly to ``docs-archive`` in airflow-site repository.
 These are all of the available flags for the ``update-sbom-information`` command:
 
 .. image:: ./images/output_sbom_update-sbom-information.svg
-    :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_sbomt_update-sbom-information.svg
+    :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_sbom_update-sbom-information.svg
     :width: 100%
     :alt: Breeze update sbom information
 
@@ -646,7 +646,7 @@ such images are built with the ``build-all-airflow-images`` command.
 This command will build one docker image per python version, with all the airflow versions >=2.0.0 compatible.
 
 .. image:: ./images/output_sbom_build-all-airflow-images.svg
-    :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_sbom_build-all-airflow-images.svg
+    :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_sbom_build-all-airflow-images.svg
     :width: 100%
     :alt: Breeze build all airflow images
 
@@ -658,7 +658,7 @@ The SBOM information published on our website can be converted into a spreadshee
 properties of the dependencies. This is done by the ``export-dependency-information`` command.
 .. image:: ./images/output_sbom_export-dependency-information.svg
-    :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_sbom_export-dependency-information.svg
+    :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_sbom_export-dependency-information.svg
     :width: 100%
     :alt: Breeze sbom export dependency information

From 7d8ea68bdaae3258bd391b8f6ae0277258a7c437 Mon Sep 17 00:00:00 2001
From: GPK
Date: Sat, 12 Oct 2024 08:47:57 +0100
Subject: [PATCH 084/125] Restrict looker-sdk version 24.18.0 and microsoft-kiota-http 1.3.4 (#42954)

* restrict looker version 24.18.0
* update microsoft provider deps with microsoft-kiota-http
* update gh issue ids to provider yaml files
* ignore type in appflow hook

---
 generated/provider_dependencies.json | 3 ++-
 .../src/airflow/providers/amazon/aws/hooks/appflow.py | 8 ++++----
 providers/src/airflow/providers/google/provider.yaml | 4 +++-
 .../src/airflow/providers/microsoft/azure/provider.yaml | 3 +++
 4 files changed, 12 insertions(+), 6 deletions(-)

diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json
index e6a0deca23cbf..4d921fc1fb911 100644
--- a/generated/provider_dependencies.json
+++ b/generated/provider_dependencies.json
@@ -681,7 +681,7 @@
         "httpx>=0.25.0",
         "immutabledict>=4.2.0",
         "json-merge-patch>=0.2",
-        "looker-sdk>=22.4.0",
+        "looker-sdk>=22.4.0,!=24.18.0",
         "pandas-gbq>=0.7.0",
         "pandas>=1.5.3,<2.2;python_version<\"3.9\"",
         "pandas>=2.1.2,<2.2;python_version>=\"3.9\"",
@@ -827,6 +827,7 @@
         "azure-storage-file-share>=12.7.0",
         "azure-synapse-artifacts>=0.17.0",
         "azure-synapse-spark>=0.2.0",
+        "microsoft-kiota-http>=1.3.0,!=1.3.4",
         "msgraph-core>=1.0.0"
     ],
     "devel-deps": [
diff --git a/providers/src/airflow/providers/amazon/aws/hooks/appflow.py b/providers/src/airflow/providers/amazon/aws/hooks/appflow.py
index 5ef994917926b..e68637e50dc0b 100644
--- a/providers/src/airflow/providers/amazon/aws/hooks/appflow.py
+++ b/providers/src/airflow/providers/amazon/aws/hooks/appflow.py
@@ -117,9 +117,9 @@ def update_flow_filter(self, flow_name: str, filter_tasks, set_trigger_ondemand:
 
         self.conn.update_flow(
             flowName=response["flowName"],
-            destinationFlowConfigList=response["destinationFlowConfigList"],
-            sourceFlowConfig=response["sourceFlowConfig"],
-            triggerConfig=response["triggerConfig"],
+            destinationFlowConfigList=response["destinationFlowConfigList"],  # type: ignore[arg-type]
+            sourceFlowConfig=response["sourceFlowConfig"],  # type: ignore[arg-type]
+            triggerConfig=response["triggerConfig"],  # type: ignore[arg-type]
             description=response.get("description", "Flow description."),
-            tasks=tasks,
+            tasks=tasks,  # type: ignore[arg-type]
         )
diff --git a/providers/src/airflow/providers/google/provider.yaml b/providers/src/airflow/providers/google/provider.yaml
index 9e469d278775b..196a035badc4d 100644
--- a/providers/src/airflow/providers/google/provider.yaml
+++ b/providers/src/airflow/providers/google/provider.yaml
@@ -157,7 +157,9 @@ dependencies:
   - grpcio-gcp>=0.2.2
   - httpx>=0.25.0
   - json-merge-patch>=0.2
-  - looker-sdk>=22.4.0
+  # looker-sdk 24.18.0 fails to import looker_sdk.rtl (No module named looker_sdk.rtl)
+  # See https://github.com/looker-open-source/sdk-codegen/issues/1518
+  - looker-sdk>=22.4.0,!=24.18.0
   - pandas-gbq>=0.7.0
   # In pandas 2.2 minimal version of the sqlalchemy is 2.0
   # https://pandas.pydata.org/docs/whatsnew/v2.2.0.html#increased-minimum-versions-for-dependencies
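The `!=` pins added above follow standard PEP 440 specifier semantics. As a quick illustration (not part of the patch itself), the `packaging` library, which pip relies on for requirement matching, evaluates the new looker-sdk pin like this:

```python
# Illustration only (not part of this patch): how a PEP 440 specifier with an
# exclusion, such as "looker-sdk>=22.4.0,!=24.18.0", is evaluated by the
# `packaging` library that pip uses internally.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

spec = SpecifierSet(">=22.4.0,!=24.18.0")

for candidate in ("22.4.0", "24.17.0", "24.18.0", "24.18.1"):
    # Only the broken 24.18.0 release is rejected; every other version
    # at or above the 22.4.0 minimum still satisfies the specifier.
    print(candidate, Version(candidate) in spec)
# 22.4.0 True, 24.17.0 True, 24.18.0 False, 24.18.1 True
```

This is why an exclusion pin is preferred over an upper bound here: future fixed releases (24.18.1 and later) are picked up automatically without another dependency change.

diff --git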
a/providers/src/airflow/providers/microsoft/azure/provider.yaml b/providers/src/airflow/providers/microsoft/azure/provider.yaml index a47ec86de9f78..cf0b3f75ef9d8 100644 --- a/providers/src/airflow/providers/microsoft/azure/provider.yaml +++ b/providers/src/airflow/providers/microsoft/azure/provider.yaml @@ -108,6 +108,9 @@ dependencies: - azure-mgmt-containerregistry>=8.0.0 - azure-mgmt-containerinstance>=10.1.0 - msgraph-core>=1.0.0 + # msgraph-core has transient import failures with microsoft-kiota-http==1.3.4 + # See https://github.com/microsoftgraph/msgraph-sdk-python-core/issues/706 + - microsoft-kiota-http>=1.3.0,!=1.3.4 devel-dependencies: - pywinrm From 911efec913e630ebb8ea37af3755e648a4eedabc Mon Sep 17 00:00:00 2001 From: GPK Date: Sat, 12 Oct 2024 10:49:00 +0100 Subject: [PATCH 085/125] upgrade trove-classifiers (#42950) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 038393b139a02..844e1b6d3ffa6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ requires = [ "pluggy==1.5.0", "smmap==5.0.1", "tomli==2.0.2; python_version < '3.11'", - "trove-classifiers==2024.9.12", + "trove-classifiers==2024.10.11", ] build-backend = "hatchling.build" From 465332bb339f38fe4cbd4e2fb3be149aef80f89a Mon Sep 17 00:00:00 2001 From: Aakcht Date: Sat, 12 Oct 2024 19:23:54 +0500 Subject: [PATCH 086/125] Chart: fix VCT for scheduler in local and persistent mode (#42946) --- chart/templates/scheduler/scheduler-deployment.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/chart/templates/scheduler/scheduler-deployment.yaml b/chart/templates/scheduler/scheduler-deployment.yaml index 634c3a41f7de0..551ab94c8c48d 100644 --- a/chart/templates/scheduler/scheduler-deployment.yaml +++ b/chart/templates/scheduler/scheduler-deployment.yaml @@ -324,7 +324,9 @@ spec: emptyDir: {{- toYaml (default (dict) .Values.logs.emptyDirConfig) | nindent 12 }} {{- else }} volumeClaimTemplates: - - metadata: + - apiVersion: v1 + kind: PersistentVolumeClaim + metadata: name: logs {{- if .Values.workers.persistence.annotations }} annotations: {{- toYaml .Values.workers.persistence.annotations | nindent 10 }} From 794b153fcd72f1b2daf6b57ea14ee146d9c2a171 Mon Sep 17 00:00:00 2001 From: Jean-Eudes Peloye Date: Sat, 12 Oct 2024 16:28:30 +0200 Subject: [PATCH 087/125] fix: use instance base_container_name to fetch logs on trigger_reentry (#42960) --- .../providers/cncf/kubernetes/operators/pod.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/providers/src/airflow/providers/cncf/kubernetes/operators/pod.py b/providers/src/airflow/providers/cncf/kubernetes/operators/pod.py index 68081b5a67075..62af050a50cdd 100644 --- a/providers/src/airflow/providers/cncf/kubernetes/operators/pod.py +++ b/providers/src/airflow/providers/cncf/kubernetes/operators/pod.py @@ -31,7 +31,7 @@ from contextlib import AbstractContextManager from enum import Enum from functools import cached_property -from typing import TYPE_CHECKING, Any, Callable, Iterable, Sequence +from typing import TYPE_CHECKING, Any, Callable, Iterable, Literal, Sequence import kubernetes import tenacity @@ -91,7 +91,6 @@ if TYPE_CHECKING: import jinja2 from pendulum import DateTime - from typing_extensions import Literal from airflow.providers.cncf.kubernetes.secret import Secret from airflow.utils.context import Context @@ -285,7 +284,8 @@ def __init__( startup_timeout_seconds: int = 120, startup_check_interval_seconds: int = 5, get_logs: 
bool = True,
-        container_logs: Iterable[str] | str | Literal[True] = BASE_CONTAINER_NAME,
+        base_container_name: str | None = None,
+        container_logs: Iterable[str] | str | Literal[True] | None = None,
         image_pull_policy: str | None = None,
         annotations: dict | None = None,
         container_resources: k8s.V1ResourceRequirements | None = None,
@@ -315,7 +315,6 @@ def __init__(
         termination_grace_period: int | None = None,
         configmaps: list[str] | None = None,
         skip_on_exit_code: int | Container[int] | None = None,
-        base_container_name: str | None = None,
         deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
         poll_interval: float = 2,
         log_pod_spec_on_failure: bool = True,
@@ -357,9 +356,10 @@ def __init__(
         self.cluster_context = cluster_context
         self.reattach_on_restart = reattach_on_restart
         self.get_logs = get_logs
-        self.container_logs = container_logs
-        if self.container_logs == KubernetesPodOperator.BASE_CONTAINER_NAME:
-            self.container_logs = base_container_name or self.BASE_CONTAINER_NAME
+        # Fall back to the class variable BASE_CONTAINER_NAME here instead of via a default argument value
+        # in the init method signature, to be compatible with subclasses overriding the class variable value.
+        self.base_container_name = base_container_name or self.BASE_CONTAINER_NAME
+        self.container_logs = container_logs or self.base_container_name
         self.image_pull_policy = image_pull_policy
         self.node_selector = node_selector or {}
         self.annotations = annotations or {}
@@ -398,7 +398,6 @@ def __init__(
             if skip_on_exit_code is not None
             else []
         )
-        self.base_container_name = base_container_name or self.BASE_CONTAINER_NAME
        self.deferrable = deferrable
         self.poll_interval = poll_interval
         self.remote_pod: k8s.V1Pod | None = None
@@ -785,7 +784,7 @@ def trigger_reentry(self, context: Context, event: dict[str, Any]) -> Any:
 
             pod_log_status = self.pod_manager.fetch_container_logs(
                 pod=self.pod,
-                container_name=self.BASE_CONTAINER_NAME,
+                container_name=self.base_container_name,
                 follow=follow,
                 since_time=last_log_time,
             )
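The comment added in the hunk above is the heart of the fix: resolving the fallback at `__init__` time, rather than baking `BASE_CONTAINER_NAME` into the signature default, lets a subclass that overrides the class attribute take effect. A minimal standalone sketch of the pattern, using simplified stand-in classes rather than the real operator:

```python
# Simplified stand-ins (not the real Airflow classes) sketching why the
# fallback is resolved inside __init__: `self.BASE_CONTAINER_NAME` is looked
# up through the instance's class, so a subclass override wins, whereas a
# default bound into the method signature would silently ignore it.
from __future__ import annotations


class PodOperatorSketch:
    BASE_CONTAINER_NAME = "base"

    def __init__(self, base_container_name: str | None = None) -> None:
        self.base_container_name = base_container_name or self.BASE_CONTAINER_NAME


class CustomPodOperatorSketch(PodOperatorSketch):
    # Overrides only the class attribute; no __init__ changes are needed.
    BASE_CONTAINER_NAME = "main"


print(PodOperatorSketch().base_container_name)        # -> base
print(CustomPodOperatorSketch().base_container_name)  # -> main
```

With the old code, `trigger_reentry` always fetched logs from the hard-coded class constant; routing everything through `self.base_container_name` keeps deferred runs consistent with subclass configuration.

From 84726c275af0f4e1dc9ad63c57e61fe3fbcddca8 Mon Sep 17 00:00:00 2001
From: Geonwoo Kim <78521993+kgw7401@users.noreply.github.com>
Date: Sat, 12 Oct 2024 23:37:24 +0900
Subject: [PATCH 088/125] fix: 03_contributors_quick_start_docs (#42927)

* Change code-block position

---
 contributing-docs/03_contributors_quick_start.rst | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/contributing-docs/03_contributors_quick_start.rst b/contributing-docs/03_contributors_quick_start.rst
index bddecc6f1e486..4ee65da0350b8 100644
--- a/contributing-docs/03_contributors_quick_start.rst
+++ b/contributing-docs/03_contributors_quick_start.rst
@@ -273,15 +273,14 @@ Setting up Breeze
 means that you are inside the Breeze container and ready to run most of the development tasks. You can leave the
 environment with ``exit`` and re-enter it with just ``breeze`` command.
 
-.. code-block:: bash
-
-   root@b76fcb399bb6:/opt/airflow# airflow db reset
-
-
 6. Once you enter breeze environment, create airflow tables and users from the breeze CLI. ``airflow db reset`` is
    required to execute at least once for Airflow Breeze to get the database/tables created. If you run
    tests, however - the test database will be initialized automatically for you.
 
+.. code-block:: bash
+
+   root@b76fcb399bb6:/opt/airflow# airflow db reset
+
 .. code-block:: bash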
 
    root@b76fcb399bb6:/opt/airflow# airflow users create \

From 4cd1e7d7726489fb8d7f0890154d9f6030db7aa0 Mon Sep 17 00:00:00 2001
From: Kaxil Naik
Date: Sat, 12 Oct 2024 15:40:10 +0100
Subject: [PATCH 089/125] Add skeleton project for task-sdk (#42904)

closes https://github.com/apache/airflow/issues/42856

This PR adds a skeleton project for [AIP-72 Task SDK](https://cwiki.apache.org/confluence/display/AIRFLOW/AIP-72+Task+Execution+Interface+aka+Task+SDK)

It creates this new project, integrates it with Breeze, and runs it on CI.

The code & tests are just placeholders at this point; we will start adding the things needed for DAG authors in separate PRs.

---
 .dockerignore                                 |   1 +
 .github/boring-cyborg.yml                     |   3 +
 .github/workflows/ci.yml                      |   1 +
 .github/workflows/task-sdk-tests.yml          |  91 ++++++++++++++
 Dockerfile                                    |  11 +-
 Dockerfile.ci                                 |  12 +-
 .../doc/09_release_management_tasks.rst       |  23 ++++
 .../doc/images/output_release-management.svg  |  18 ++-
 .../doc/images/output_release-management.txt  |   2 +-
 ...se-management_prepare-task-sdk-package.svg | 116 ++++++++++++++++++
 ...se-management_prepare-task-sdk-package.txt |   1 +
 ...utput_setup_check-all-params-in-groups.svg |  20 +--
 ...utput_setup_check-all-params-in-groups.txt |   2 +-
 ...output_setup_regenerate-command-images.svg |  20 +--
 ...output_setup_regenerate-command-images.txt |   2 +-
 dev/breeze/doc/images/output_shell.svg        |   4 +-
 dev/breeze/doc/images/output_shell.txt        |   2 +-
 .../doc/images/output_testing_db-tests.svg    |   6 +-
 .../doc/images/output_testing_db-tests.txt    |   2 +-
 .../images/output_testing_non-db-tests.svg    |   6 +-
 .../images/output_testing_non-db-tests.txt    |   2 +-
 .../doc/images/output_testing_tests.svg       |   8 +-
 .../doc/images/output_testing_tests.txt       |   2 +-
 .../commands/release_management_commands.py   | 107 +++++++++++++++-
 .../release_management_commands_config.py     |  15 +++
 .../src/airflow_breeze/global_constants.py    |  18 +++
 .../utils/docker_command_utils.py             |   1 +
 .../src/airflow_breeze/utils/run_tests.py     |   1 +
 .../airflow_breeze/utils/selective_checks.py  |  21 +++-
 dev/breeze/tests/test_selective_checks.py     |  10 +-
 pyproject.toml                                |   6 +-
 scripts/ci/docker-compose/local.yml           |   3 +
 scripts/ci/kubernetes/k8s_requirements.txt    |   1 +
 scripts/docker/install_airflow.sh             |  11 +-
 task_sdk/README.md                            |  18 +++
 task_sdk/pyproject.toml                       |  40 ++++++
 task_sdk/src/airflow/sdk/__init__.py          |  21 ++++
 task_sdk/src/airflow/sdk/py.typed             |  18 +++
 task_sdk/tests/__init__.py                    |  16 +++
 task_sdk/tests/conftest.py                    |  26 ++++
 task_sdk/tests/test_hello.py                  |  23 ++++
 41 files changed, 650 insertions(+), 61 deletions(-)
 create mode 100644 .github/workflows/task-sdk-tests.yml
 create mode 100644 dev/breeze/doc/images/output_release-management_prepare-task-sdk-package.svg
 create mode 100644 dev/breeze/doc/images/output_release-management_prepare-task-sdk-package.txt
 create mode 100644 task_sdk/README.md
 create mode 100644 task_sdk/pyproject.toml
 create mode 100644 task_sdk/src/airflow/sdk/__init__.py
 create mode 100644 task_sdk/src/airflow/sdk/py.typed
 create mode 100644 task_sdk/tests/__init__.py
 create mode 100644 task_sdk/tests/conftest.py
 create mode 100644 task_sdk/tests/test_hello.py

diff --git a/.dockerignore b/.dockerignore
index bdbf5fb0883af..2d70030fd886b 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -35,6 +35,7 @@
 !docs
 !licenses
 !providers/
+!task_sdk/
 
 # Add those folders to the context so that they are available in the CI container
 !scripts
diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml
index 019ea900f8e12..e790d65e2fe6f 100644
---
a/.github/boring-cyborg.yml +++ b/.github/boring-cyborg.yml @@ -679,6 +679,9 @@ labelPRBasedOnFilePath: area:system-tests: - tests/system/**/* + area:task-sdk: + - task_sdk/**/* + area:db-migrations: - airflow/migrations/versions/* diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 716323cb9acfd..2267154b03a7a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -93,6 +93,7 @@ jobs: run-ui-tests: ${{ steps.selective-checks.outputs.run-ui-tests }} run-www-tests: ${{ steps.selective-checks.outputs.run-www-tests }} run-kubernetes-tests: ${{ steps.selective-checks.outputs.run-kubernetes-tests }} + run-task-sdk-tests: ${{ steps.selective-checks.outputs.run-task-sdk-tests }} basic-checks-only: ${{ steps.selective-checks.outputs.basic-checks-only }} ci-image-build: ${{ steps.selective-checks.outputs.ci-image-build }} prod-image-build: ${{ steps.selective-checks.outputs.prod-image-build }} diff --git a/.github/workflows/task-sdk-tests.yml b/.github/workflows/task-sdk-tests.yml new file mode 100644 index 0000000000000..14fae903837c2 --- /dev/null +++ b/.github/workflows/task-sdk-tests.yml @@ -0,0 +1,91 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +--- +name: Task SDK tests +on: # yamllint disable-line rule:truthy + workflow_call: + inputs: + runs-on-as-json-default: + description: "The array of labels (in json form) determining default runner used for the build." 
+ required: true + type: string + image-tag: + description: "Tag to set for the image" + required: true + type: string + canary-run: + description: "Whether this is a canary run" + required: true + type: string + default-python-version: + description: "Which version of python should be used by default" + required: true + type: string + python-versions: + description: "JSON-formatted array of Python versions to build images from" + required: true + type: string +jobs: + task-sdk-tests: + timeout-minutes: 80 + name: Task SDK:P${{ matrix.python-version }} tests + runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} + strategy: + fail-fast: false + matrix: + python-version: "${{fromJSON(inputs.python-versions)}}" + env: + GITHUB_REPOSITORY: ${{ github.repository }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_USERNAME: ${{ github.actor }} + IMAGE_TAG: "${{ inputs.image-tag }}" + INCLUDE_NOT_READY_PROVIDERS: "true" + PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" + VERBOSE: "true" + CLEAN_AIRFLOW_INSTALLATION: "${{ inputs.canary-run }}" + if: inputs.run-task-sdk-tests == 'true' + steps: + - name: "Cleanup repo" + shell: bash + run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@v4 + with: + persist-credentials: false + - name: "Cleanup docker" + run: ./scripts/ci/cleanup_docker.sh + - name: "Prepare breeze & CI image: ${{ matrix.python-version }}:${{ inputs.image-tag }}" + uses: ./.github/actions/prepare_breeze_and_image + - name: "Cleanup dist files" + run: rm -fv ./dist/* + - name: "Prepare Task SDK packages: wheel" + run: > + breeze release-management prepare-task-sdk-package --package-format wheel + - name: "Verify wheel packages with twine" + run: | + pipx uninstall twine || true + pipx install twine && twine check dist/*.whl + - name: > + Run provider unit tests on + Airflow Task SDK:Python ${{ matrix.python-version }} + if: matrix.run-tests == 'true' + run: > + breeze testing tests --run-in-parallel + --parallel-test-types TaskSDK + --use-packages-from-dist + --package-format wheel diff --git a/Dockerfile b/Dockerfile index ce229c75facef..7b5fa8d0a2b44 100644 --- a/Dockerfile +++ b/Dockerfile @@ -877,13 +877,18 @@ function install_airflow() { # Determine the installation_command_flags based on AIRFLOW_INSTALLATION_METHOD method local installation_command_flags if [[ ${AIRFLOW_INSTALLATION_METHOD} == "." 
]]; then - # We need _a _ file in there otherwise the editable install doesn't include anything in the .pth file + # We need _a_ file in there otherwise the editable install doesn't include anything in the .pth file mkdir -p ./providers/src/airflow/providers/ touch ./providers/src/airflow/providers/__init__.py - trap 'rm -f ./providers/src/airflow/providers/__init__.py 2>/dev/null' EXIT + + # Similarly we need _a_ file for task_sdk too + mkdir -p ./task_sdk/src/airflow/sdk/ + touch ./task_sdk/src/airflow/__init__.py + + trap 'rm -f ./providers/src/airflow/providers/__init__.py ./task_sdk/src/airflow/__init__.py 2>/dev/null' EXIT # When installing from sources - we always use `--editable` mode - installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./providers" + installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./providers --editable ./task_sdk" elif [[ ${AIRFLOW_INSTALLATION_METHOD} == "apache-airflow" ]]; then installation_command_flags="apache-airflow[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" elif [[ ${AIRFLOW_INSTALLATION_METHOD} == apache-airflow\ @\ * ]]; then diff --git a/Dockerfile.ci b/Dockerfile.ci index a3e982614de69..9339e9af6d6f8 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -676,13 +676,18 @@ function install_airflow() { # Determine the installation_command_flags based on AIRFLOW_INSTALLATION_METHOD method local installation_command_flags if [[ ${AIRFLOW_INSTALLATION_METHOD} == "." ]]; then - # We need _a _ file in there otherwise the editable install doesn't include anything in the .pth file + # We need _a_ file in there otherwise the editable install doesn't include anything in the .pth file mkdir -p ./providers/src/airflow/providers/ touch ./providers/src/airflow/providers/__init__.py - trap 'rm -f ./providers/src/airflow/providers/__init__.py 2>/dev/null' EXIT + + # Similarly we need _a_ file for task_sdk too + mkdir -p ./task_sdk/src/airflow/sdk/ + touch ./task_sdk/src/airflow/__init__.py + + trap 'rm -f ./providers/src/airflow/providers/__init__.py ./task_sdk/src/airflow/__init__.py 2>/dev/null' EXIT # When installing from sources - we always use `--editable` mode - installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./providers" + installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./providers --editable ./task_sdk" elif [[ ${AIRFLOW_INSTALLATION_METHOD} == "apache-airflow" ]]; then installation_command_flags="apache-airflow[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" elif [[ ${AIRFLOW_INSTALLATION_METHOD} == apache-airflow\ @\ * ]]; then @@ -1356,6 +1361,7 @@ RUN bash /scripts/docker/install_pipx_tools.sh # So in case pyproject.toml changes we can install latest dependencies required. COPY pyproject.toml ${AIRFLOW_SOURCES}/pyproject.toml COPY providers/pyproject.toml ${AIRFLOW_SOURCES}/providers/pyproject.toml +COPY task_sdk/pyproject.toml ${AIRFLOW_SOURCES}/task_sdk/pyproject.toml COPY airflow/__init__.py ${AIRFLOW_SOURCES}/airflow/ COPY generated/* ${AIRFLOW_SOURCES}/generated/ COPY constraints/* ${AIRFLOW_SOURCES}/constraints/ diff --git a/dev/breeze/doc/09_release_management_tasks.rst b/dev/breeze/doc/09_release_management_tasks.rst index 9feb9a5b195e6..9cef5d6ccd15e 100644 --- a/dev/breeze/doc/09_release_management_tasks.rst +++ b/dev/breeze/doc/09_release_management_tasks.rst @@ -666,3 +666,26 @@ properties of the dependencies. 
This is done by the ``export-dependency-informat
 
 Next step: Follow the `Advanced Breeze topics <10_advanced_breeze_topics.rst>`_ to learn more about Breeze
 internals.
+
+Preparing airflow Task SDK packages
+"""""""""""""""""""""""""""""""""""
+
+You can prepare airflow Task SDK packages using Breeze:
+
+.. code-block:: bash
+
+    breeze release-management prepare-task-sdk-package
+
+This prepares the airflow Task SDK .whl package in the dist folder.
+
+Again, you can specify the optional ``--package-format`` flag to build selected formats of the Task SDK packages;
+for this command the default is to build only the ``wheel`` package.
+
+.. code-block:: bash
+
+    breeze release-management prepare-task-sdk-package --package-format=wheel
+
+.. image:: ./images/output_release-management_prepare-task-sdk-package.svg
+    :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/doc/images/output_release-management_prepare-task-sdk-package.svg
+    :width: 100%
+    :alt: Breeze release-management prepare-task-sdk-package
diff --git a/dev/breeze/doc/images/output_release-management.svg b/dev/breeze/doc/images/output_release-management.svg
index 3392f33e92dfe..2798700d8e26e 100644
--- a/dev/breeze/doc/images/output_release-management.svg
+++ b/dev/breeze/doc/images/output_release-management.svg
@@ -1,4 +1,4 @@
- +
generate-constraints    Generates pinned constraint files with all extras from pyproject.toml in parallel.         update-constraints      Update released constraints with manual changes.                                           ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Commands ───────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +prepare-task-sdk-package                Prepare sdist/whl package of Airflow Task SDK.                             +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_release-management.txt b/dev/breeze/doc/images/output_release-management.txt index 4655263b2bcfe..a186de953f690 100644 --- a/dev/breeze/doc/images/output_release-management.txt +++ b/dev/breeze/doc/images/output_release-management.txt @@ -1 +1 @@ -46d2a272002ee4a7dfefba98127d1680 +dbd26a6fc8d708bcbbe450896906bef6 diff --git a/dev/breeze/doc/images/output_release-management_prepare-task-sdk-package.svg b/dev/breeze/doc/images/output_release-management_prepare-task-sdk-package.svg new file mode 100644 index 0000000000000..63bb39a5189e8 --- /dev/null +++ b/dev/breeze/doc/images/output_release-management_prepare-task-sdk-package.svg @@ -0,0 +1,116 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Command: release-management prepare-task-sdk-package + + + + + + + + + + +Usage:breeze release-management prepare-task-sdk-package[OPTIONS] + +Prepare sdist/whl package of Airflow Task SDK. + +╭─ Package flags ──────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--package-formatFormat of packages.(wheel | sdist | both)[default: wheel] +--use-local-hatchUse local hatch instead of docker to build the package. You need to have hatch installed. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. 
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ + + + + diff --git a/dev/breeze/doc/images/output_release-management_prepare-task-sdk-package.txt b/dev/breeze/doc/images/output_release-management_prepare-task-sdk-package.txt new file mode 100644 index 0000000000000..f20d3a31463fa --- /dev/null +++ b/dev/breeze/doc/images/output_release-management_prepare-task-sdk-package.txt @@ -0,0 +1 @@ +adc65fd78e7040d42660aea2289d0f96 diff --git a/dev/breeze/doc/images/output_setup_check-all-params-in-groups.svg b/dev/breeze/doc/images/output_setup_check-all-params-in-groups.svg index 71feee5875aa1..a5f797e9cf211 100644 --- a/dev/breeze/doc/images/output_setup_check-all-params-in-groups.svg +++ b/dev/breeze/doc/images/output_setup_check-all-params-in-groups.svg @@ -190,16 +190,16 @@ release-management:prepare-airflow-tarball | release-management:prepare-helm-chart-package |            release-management:prepare-helm-chart-tarball | release-management:prepare-provider-documentation |     release-management:prepare-provider-packages | release-management:prepare-python-client |               -release-management:publish-docs | release-management:release-prod-images |                              -release-management:start-rc-process | release-management:start-release |                                -release-management:tag-providers | release-management:update-constraints |                              -release-management:verify-provider-packages | sbom | sbom:build-all-airflow-images |                    -sbom:export-dependency-information | sbom:generate-providers-requirements |                             -sbom:update-sbom-information | setup | setup:autocomplete | setup:check-all-params-in-groups |          -setup:config | setup:regenerate-command-images | setup:self-upgrade | setup:synchronize-local-mounts |  -setup:version | shell | start-airflow | static-checks | testing | testing:db-tests |                    -testing:docker-compose-tests | testing:helm-tests | testing:integration-tests | testing:non-db-tests |  -testing:tests)                                                                                          +release-management:prepare-task-sdk-package | release-management:publish-docs |                         +release-management:release-prod-images | release-management:start-rc-process |                          +release-management:start-release | release-management:tag-providers |                                   +release-management:update-constraints | release-management:verify-provider-packages | sbom |            +sbom:build-all-airflow-images | sbom:export-dependency-information |                                    +sbom:generate-providers-requirements | sbom:update-sbom-information | setup | setup:autocomplete |      +setup:check-all-params-in-groups | setup:config | setup:regenerate-command-images | setup:self-upgrade  +| setup:synchronize-local-mounts | setup:version | shell | start-airflow | static-checks | testing |    +testing:db-tests | testing:docker-compose-tests | testing:helm-tests | testing:integration-tests |      +testing:non-db-tests | testing:tests)                                                                   ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ --verbose-vPrint verbose information about 
performed steps. diff --git a/dev/breeze/doc/images/output_setup_check-all-params-in-groups.txt b/dev/breeze/doc/images/output_setup_check-all-params-in-groups.txt index 098640fbe5bb8..d2cab78ff8c5b 100644 --- a/dev/breeze/doc/images/output_setup_check-all-params-in-groups.txt +++ b/dev/breeze/doc/images/output_setup_check-all-params-in-groups.txt @@ -1 +1 @@ -a6a4664c4cf6fdf2bc51ffaa9c0e8177 +852bdb14696f768b8a22551ba88bf061 diff --git a/dev/breeze/doc/images/output_setup_regenerate-command-images.svg b/dev/breeze/doc/images/output_setup_regenerate-command-images.svg index 83018c3d8d39f..c107a1843b4e5 100644 --- a/dev/breeze/doc/images/output_setup_regenerate-command-images.svg +++ b/dev/breeze/doc/images/output_setup_regenerate-command-images.svg @@ -204,16 +204,16 @@ release-management:prepare-airflow-tarball | release-management:prepare-helm-chart-package |         release-management:prepare-helm-chart-tarball | release-management:prepare-provider-documentation |  release-management:prepare-provider-packages | release-management:prepare-python-client |            -release-management:publish-docs | release-management:release-prod-images |                           -release-management:start-rc-process | release-management:start-release |                             -release-management:tag-providers | release-management:update-constraints |                           -release-management:verify-provider-packages | sbom | sbom:build-all-airflow-images |                 -sbom:export-dependency-information | sbom:generate-providers-requirements |                          -sbom:update-sbom-information | setup | setup:autocomplete | setup:check-all-params-in-groups |       -setup:config | setup:regenerate-command-images | setup:self-upgrade | setup:synchronize-local-mounts -| setup:version | shell | start-airflow | static-checks | testing | testing:db-tests |               -testing:docker-compose-tests | testing:helm-tests | testing:integration-tests | testing:non-db-tests -| testing:tests)                                                                                     +release-management:prepare-task-sdk-package | release-management:publish-docs |                      +release-management:release-prod-images | release-management:start-rc-process |                       +release-management:start-release | release-management:tag-providers |                                +release-management:update-constraints | release-management:verify-provider-packages | sbom |         +sbom:build-all-airflow-images | sbom:export-dependency-information |                                 +sbom:generate-providers-requirements | sbom:update-sbom-information | setup | setup:autocomplete |   +setup:check-all-params-in-groups | setup:config | setup:regenerate-command-images |                  +setup:self-upgrade | setup:synchronize-local-mounts | setup:version | shell | start-airflow |        +static-checks | testing | testing:db-tests | testing:docker-compose-tests | testing:helm-tests |     +testing:integration-tests | testing:non-db-tests | testing:tests)                                    --check-onlyOnly check if some images need to be regenerated. Return 0 if no need or 1 if needed. Cannot be used together with --command flag or --force.                                                             
╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_setup_regenerate-command-images.txt b/dev/breeze/doc/images/output_setup_regenerate-command-images.txt index b0b5b55ddd286..751b0329d71fc 100644 --- a/dev/breeze/doc/images/output_setup_regenerate-command-images.txt +++ b/dev/breeze/doc/images/output_setup_regenerate-command-images.txt @@ -1 +1 @@ -4a736bf190984799f73b3bf85dc62c4c +326695396d27b77b860202ebe9267746 diff --git a/dev/breeze/doc/images/output_shell.svg b/dev/breeze/doc/images/output_shell.svg index 1e23fe528b6d3..19b93c0ca15ea 100644 --- a/dev/breeze/doc/images/output_shell.svg +++ b/dev/breeze/doc/images/output_shell.svg @@ -654,8 +654,8 @@ (All | Default | API | Always | BranchExternalPython |   BranchPythonVenv | CLI | Core | ExternalPython |         Operators | Other | PlainAsserts | Providers |           -PythonVenv | Serialization | WWW | All-Postgres |        -All-MySQL | All-Quarantined)                             +PythonVenv | Serialization | TaskSDK | WWW |             +All-Postgres | All-MySQL | All-Quarantined)              [default: Default]                                       --use-airflow-versionUse (reinstall at entry) Airflow version from PyPI. It   can also be version (to install from PyPI), `none`,      diff --git a/dev/breeze/doc/images/output_shell.txt b/dev/breeze/doc/images/output_shell.txt index 51aa99ed73ca6..e09e77274a041 100644 --- a/dev/breeze/doc/images/output_shell.txt +++ b/dev/breeze/doc/images/output_shell.txt @@ -1 +1 @@ -94eabd0345df65038958fc4842a0ff4b +10bd351167267ce16d35b96898a64b74 diff --git a/dev/breeze/doc/images/output_testing_db-tests.svg b/dev/breeze/doc/images/output_testing_db-tests.svg index 708665af52de3..d9d92ed40f050 100644 --- a/dev/breeze/doc/images/output_testing_db-tests.svg +++ b/dev/breeze/doc/images/output_testing_db-tests.svg @@ -410,15 +410,15 @@ --parallel-test-typesSpace separated list of test types used for testing in parallel                    (API | Always | BranchExternalPython | BranchPythonVenv | CLI | Core |             ExternalPython | Operators | Other | PlainAsserts | Providers | PythonVenv |       -Serialization | WWW)                                                               +Serialization | TaskSDK | WWW)                                                     [default: API Always BranchExternalPython BranchPythonVenv CLI Core ExternalPython Operators Other PlainAsserts Providers[-amazon,google] Providers[amazon]           -Providers[google] PythonVenv Serialization WWW]                                    +Providers[google] PythonVenv Serialization TaskSDK WWW]                            --database-isolationRun airflow in database isolation mode. --excluded-parallel-test-typesSpace separated list of test types that will be excluded from parallel tes runs.   (API | Always | BranchExternalPython | BranchPythonVenv | CLI | Core |             ExternalPython | Operators | Other | PlainAsserts | Providers | PythonVenv |       -Serialization | WWW)                                                               +Serialization | TaskSDK | WWW)                                                     ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Test options ───────────────────────────────────────────────────────────────────────────────────────────────────────╮ --test-timeoutTest timeout in seconds. 
Set the pytest setup, execution and teardown timeouts to this      diff --git a/dev/breeze/doc/images/output_testing_db-tests.txt b/dev/breeze/doc/images/output_testing_db-tests.txt index 6027a2d1e666a..6245a387214f2 100644 --- a/dev/breeze/doc/images/output_testing_db-tests.txt +++ b/dev/breeze/doc/images/output_testing_db-tests.txt @@ -1 +1 @@ -7b406b63cd4a75aba6ac38b8c0b7431c +690396dbea7c9b6e018704e1ee7f727d diff --git a/dev/breeze/doc/images/output_testing_non-db-tests.svg b/dev/breeze/doc/images/output_testing_non-db-tests.svg index da43621dd6931..daf8061b13e40 100644 --- a/dev/breeze/doc/images/output_testing_non-db-tests.svg +++ b/dev/breeze/doc/images/output_testing_non-db-tests.svg @@ -376,14 +376,14 @@ --parallel-test-typesSpace separated list of test types used for testing in parallel                    (API | Always | BranchExternalPython | BranchPythonVenv | CLI | Core |             ExternalPython | Operators | Other | PlainAsserts | Providers | PythonVenv |       -Serialization | WWW)                                                               +Serialization | TaskSDK | WWW)                                                     [default: API Always BranchExternalPython BranchPythonVenv CLI Core ExternalPython Operators Other PlainAsserts Providers[-amazon,google] Providers[amazon]           -Providers[google] PythonVenv Serialization WWW]                                    +Providers[google] PythonVenv Serialization TaskSDK WWW]                            --excluded-parallel-test-typesSpace separated list of test types that will be excluded from parallel tes runs.   (API | Always | BranchExternalPython | BranchPythonVenv | CLI | Core |             ExternalPython | Operators | Other | PlainAsserts | Providers | PythonVenv |       -Serialization | WWW)                                                               +Serialization | TaskSDK | WWW)                                                     ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Test options ───────────────────────────────────────────────────────────────────────────────────────────────────────╮ --test-timeoutTest timeout in seconds. 
Set the pytest setup, execution and teardown timeouts to this      diff --git a/dev/breeze/doc/images/output_testing_non-db-tests.txt b/dev/breeze/doc/images/output_testing_non-db-tests.txt index 1080b5a377717..122c3b1acd145 100644 --- a/dev/breeze/doc/images/output_testing_non-db-tests.txt +++ b/dev/breeze/doc/images/output_testing_non-db-tests.txt @@ -1 +1 @@ -2cc222da8b9f31b93b527220b76b48a2 +dbecb30a3e03c7dffd09d46c16687e62 diff --git a/dev/breeze/doc/images/output_testing_tests.svg b/dev/breeze/doc/images/output_testing_tests.svg index c20e2ef16b243..8c8c881915a31 100644 --- a/dev/breeze/doc/images/output_testing_tests.svg +++ b/dev/breeze/doc/images/output_testing_tests.svg @@ -466,19 +466,19 @@ `Providers[-amazon,google]`                                                        (All | Default | API | Always | BranchExternalPython | BranchPythonVenv | CLI |    Core | ExternalPython | Operators | Other | PlainAsserts | Providers | PythonVenv  -| Serialization | WWW | All-Postgres | All-MySQL | All-Quarantined)                +| Serialization | TaskSDK | WWW | All-Postgres | All-MySQL | All-Quarantined)      [default: Default]                                                                 --parallel-test-typesSpace separated list of test types used for testing in parallel                    (API | Always | BranchExternalPython | BranchPythonVenv | CLI | Core |             ExternalPython | Operators | Other | PlainAsserts | Providers | PythonVenv |       -Serialization | WWW)                                                               +Serialization | TaskSDK | WWW)                                                     [default: API Always BranchExternalPython BranchPythonVenv CLI Core ExternalPython Operators Other PlainAsserts Providers[-amazon,google] Providers[amazon]           -Providers[google] PythonVenv Serialization WWW]                                    +Providers[google] PythonVenv Serialization TaskSDK WWW]                            --excluded-parallel-test-typesSpace separated list of test types that will be excluded from parallel tes runs.   (API | Always | BranchExternalPython | BranchPythonVenv | CLI | Core |             ExternalPython | Operators | Other | PlainAsserts | Providers | PythonVenv |       -Serialization | WWW)                                                               +Serialization | TaskSDK | WWW)                                                     ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭─ Test options ───────────────────────────────────────────────────────────────────────────────────────────────────────╮ --test-timeoutTest timeout in seconds. 
Set the pytest setup, execution and teardown timeouts to this    diff --git a/dev/breeze/doc/images/output_testing_tests.txt b/dev/breeze/doc/images/output_testing_tests.txt index d19f6cf5abdae..ce894444f7f3b 100644 --- a/dev/breeze/doc/images/output_testing_tests.txt +++ b/dev/breeze/doc/images/output_testing_tests.txt @@ -1 +1 @@ -15002aa129ce25039921f800fb1cf744 +c131a2a8ab980041a1a0f5e91fe58ea8 diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py index 95ecf30144ab5..a56947f026f04 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py @@ -338,10 +338,16 @@ def from_wheel(cls, filepath: Path) -> DistributionPackageInfo: @classmethod def dist_packages( - cls, *, package_format: str, dist_directory: Path, build_type: Literal["airflow", "providers"] + cls, + *, + package_format: str, + dist_directory: Path, + build_type: Literal["airflow", "providers", "task-sdk"], ) -> tuple[DistributionPackageInfo, ...]: if build_type == "airflow": default_glob_pattern = "apache[_-]airflow-[0-9]" + elif build_type == "task-sdk": + default_glob_pattern = "apache[_-]airflow[_-]task[_-]sdk" else: default_glob_pattern = "apache[_-]airflow[_-]providers" dists_info = [] @@ -549,6 +555,105 @@ def prepare_airflow_packages( get_console().print("[success]Successfully prepared Airflow packages") +TASK_SDK_DIR_PATH = AIRFLOW_SOURCES_ROOT / "task_sdk" +TASK_SDK_DIST_DIR_PATH = TASK_SDK_DIR_PATH / "dist" + + +@release_management.command( + name="prepare-task-sdk-package", + help="Prepare sdist/whl package of Airflow Task SDK.", +) +@option_package_format +@option_use_local_hatch +@option_verbose +@option_dry_run +def prepare_airflow_task_sdk_packages( + package_format: str, + use_local_hatch: bool, +): + check_python_version() + perform_environment_checks() + fix_ownership_using_docker() + cleanup_python_generated_files() + + def _build_package_with_hatch(package_format: str): + command = [ + "hatch", + "build", + "-c", + ] + if package_format == "sdist" or package_format == "both": + command += ["-t", "sdist"] + if package_format == "wheel" or package_format == "both": + command += ["-t", "wheel"] + env_copy = os.environ.copy() + run_command( + cmd=command, + cwd=TASK_SDK_DIR_PATH, + env=env_copy, + check=True, + ) + shutil.copytree(TASK_SDK_DIST_DIR_PATH, DIST_DIR, dirs_exist_ok=True) + + def _build_package_with_docker(package_format: str): + _build_local_build_image() + command = "hatch build -c " + if package_format == "sdist" or package_format == "both": + command += "-t sdist " + if package_format == "wheel" or package_format == "both": + command += "-t wheel " + container_id = f"airflow-task-sdk-build-{random.getrandbits(64):08x}" + result = run_command( + cmd=[ + "docker", + "run", + "--name", + container_id, + "-t", + "-e", + "HOME=/opt/airflow/files/home", + "-e", + "GITHUB_ACTIONS", + "-w", + "/opt/airflow/task_sdk", + AIRFLOW_BUILD_IMAGE_TAG, + "bash", + "-c", + command, + ], + check=False, + ) + if result.returncode != 0: + get_console().print("[error]Error preparing Airflow Task SDK[/]") + fix_ownership_using_docker() + sys.exit(result.returncode) + DIST_DIR.mkdir(parents=True, exist_ok=True) + get_console().print() + # Copy all files in the dist directory in container to the host dist directory (note '/.' 
in SRC) + run_command(["docker", "cp", f"{container_id}:/opt/airflow/task_sdk/dist/.", "./dist"], check=True) + run_command(["docker", "rm", "--force", container_id], check=False, stdout=DEVNULL, stderr=DEVNULL) + + if use_local_hatch: + _build_package_with_hatch( + package_format=package_format, + ) + get_console().print("[info]Checking if sdist packages can be built into wheels[/]") + packages = DistributionPackageInfo.dist_packages( + package_format=package_format, dist_directory=DIST_DIR, build_type="task-sdk" + ) + get_console().print() + _check_sdist_to_wheel_dists(packages) + get_console().print("\n[info]Packages available in dist:[/]\n") + for dist_info in packages: + get_console().print(str(dist_info)) + get_console().print() + else: + _build_package_with_docker( + package_format=package_format, + ) + get_console().print("[success]Successfully prepared Airflow Task SDK packages") + + def provider_action_summary(description: str, message_type: MessageType, packages: list[str]): if packages: get_console().print(f"{description}: {len(packages)}\n") diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands_config.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands_config.py index 6030848444b34..b232666aba7e5 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_management_commands_config.py +++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands_config.py @@ -52,6 +52,12 @@ ], } +RELEASE_AIRFLOW_TASK_SDK_COMMANDS: dict[str, str | list[str]] = { + "name": "Airflow Task SDK release commands", + "commands": [ + "prepare-task-sdk-package", + ], +} RELEASE_OTHER_COMMANDS: dict[str, str | list[str]] = { "name": "Other release commands", @@ -83,6 +89,15 @@ ], } ], + "breeze release-management prepare-task-sdk-package": [ + { + "name": "Package flags", + "options": [ + "--package-format", + "--use-local-hatch", + ], + } + ], "breeze release-management prepare-helm-chart-tarball": [ { "name": "Package flags", diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py index 64bf8112112ab..3879c121433f5 100644 --- a/dev/breeze/src/airflow_breeze/global_constants.py +++ b/dev/breeze/src/airflow_breeze/global_constants.py @@ -185,6 +185,7 @@ class SelectiveUnitTestTypes(Enum): PLAIN_ASSERTS = "PlainAsserts" PROVIDERS = "Providers" PYTHON_VENV = "PythonVenv" + TASK_SDK = "TaskSDK" WWW = "WWW" @@ -218,6 +219,23 @@ def all_helm_test_packages() -> list[str]: *all_helm_test_packages(), ] + +@cache +def all_task_sdk_test_packages() -> list[str]: + return sorted( + [ + candidate.name + for candidate in (AIRFLOW_SOURCES_ROOT / "task_sdk" / "tests").iterdir() + if candidate.is_dir() and candidate.name != "__pycache__" + ] + ) + + +ALLOWED_TASK_SDK_TEST_PACKAGES = [ + "all", + *all_task_sdk_test_packages(), +] + ALLOWED_PACKAGE_FORMATS = ["wheel", "sdist", "both"] ALLOWED_INSTALLATION_PACKAGE_FORMATS = ["wheel", "sdist"] ALLOWED_INSTALLATION_METHODS = [".", "apache-airflow"] diff --git a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py index 618d064e0e41f..164a1583e1fea 100644 --- a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py @@ -93,6 +93,7 @@ ("hooks", "/opt/airflow/hooks"), ("logs", "/root/airflow/logs"), ("providers", "/opt/airflow/providers"), + ("task_sdk", "/opt/airflow/task_sdk"), ("pyproject.toml", 
"/opt/airflow/pyproject.toml"), ("scripts", "/opt/airflow/scripts"), ("scripts/docker/entrypoint_ci.sh", "/entrypoint"), diff --git a/dev/breeze/src/airflow_breeze/utils/run_tests.py b/dev/breeze/src/airflow_breeze/utils/run_tests.py index 23fe5b6e1965e..acde9d0cc4d42 100644 --- a/dev/breeze/src/airflow_breeze/utils/run_tests.py +++ b/dev/breeze/src/airflow_breeze/utils/run_tests.py @@ -169,6 +169,7 @@ def get_excluded_provider_args(python_version: str) -> list[str]: "tests/serialization", ], "System": ["tests/system"], + "TaskSDK": ["task_sdk/tests"], "WWW": [ "tests/www", ], diff --git a/dev/breeze/src/airflow_breeze/utils/selective_checks.py b/dev/breeze/src/airflow_breeze/utils/selective_checks.py index 18458af5f8a23..0cb69a194713d 100644 --- a/dev/breeze/src/airflow_breeze/utils/selective_checks.py +++ b/dev/breeze/src/airflow_breeze/utils/selective_checks.py @@ -87,12 +87,12 @@ "API Always BranchExternalPython BranchPythonVenv " "CLI Core ExternalPython Operators Other PlainAsserts " "Providers[-amazon,google] Providers[amazon] Providers[google] " - "PythonVenv Serialization WWW" + "PythonVenv Serialization TaskSDK WWW" ) ALL_CI_SELECTIVE_TEST_TYPES_WITHOUT_PROVIDERS = ( "API Always BranchExternalPython BranchPythonVenv CLI Core " - "ExternalPython Operators Other PlainAsserts PythonVenv Serialization WWW" + "ExternalPython Operators Other PlainAsserts PythonVenv Serialization TaskSDK WWW" ) ALL_PROVIDERS_SELECTIVE_TEST_TYPES = "Providers[-amazon,google] Providers[amazon] Providers[google]" @@ -112,6 +112,7 @@ class FileGroupForCi(Enum): LEGACY_WWW_FILES = "legacy_www_files" SYSTEM_TEST_FILES = "system_tests" KUBERNETES_FILES = "kubernetes_files" + TASK_SDK_FILES = "task_sdk_files" ALL_PYTHON_FILES = "all_python_files" ALL_SOURCE_FILES = "all_sources_for_tests" ALL_AIRFLOW_PYTHON_FILES = "all_airflow_python_files" @@ -227,6 +228,8 @@ def __hash__(self): r"^chart", r"^providers/src/", r"^providers/tests/", + r"^task_sdk/src/", + r"^task_sdk/tests/", r"^tests", r"^kubernetes_tests", ], @@ -243,6 +246,10 @@ def __hash__(self): r"^tests/utils/", r"^dev/tests_common/.*\.py$", ], + FileGroupForCi.TASK_SDK_FILES: [ + r"^task_sdk/src/airflow/sdk/.*\.py$", + r"^task_sdk/tests/.*\.py$", + ], } ) @@ -258,6 +265,8 @@ def __hash__(self): r"^providers/tests/.*", r"^providers/tests/system/.*", r"^tests/dags/test_imports.py", + r"^task_sdk/src/airflow/sdk/.*\.py$", + r"^task_sdk/tests/.*\.py$", ] } ) @@ -296,6 +305,10 @@ def __hash__(self): r"^airflow/serialization/", r"^tests/serialization/", ], + SelectiveUnitTestTypes.TASK_SDK: [ + r"^task_sdk/src/airflow/sdk/", + r"^task_sdk/tests/", + ], SelectiveUnitTestTypes.PYTHON_VENV: PYTHON_OPERATOR_FILES, SelectiveUnitTestTypes.BRANCH_PYTHON_VENV: PYTHON_OPERATOR_FILES, SelectiveUnitTestTypes.EXTERNAL_PYTHON: PYTHON_OPERATOR_FILES, @@ -700,6 +713,10 @@ def run_amazon_tests(self) -> bool: or "Providers" in self.parallel_test_types_list_as_string.split(" ") ) + @cached_property + def run_task_sdk_tests(self) -> bool: + return self._should_be_run(FileGroupForCi.TASK_SDK_FILES) + @cached_property def run_kubernetes_tests(self) -> bool: return self._should_be_run(FileGroupForCi.KUBERNETES_FILES) diff --git a/dev/breeze/tests/test_selective_checks.py b/dev/breeze/tests/test_selective_checks.py index be3922d461963..6d031f70b875a 100644 --- a/dev/breeze/tests/test_selective_checks.py +++ b/dev/breeze/tests/test_selective_checks.py @@ -1140,7 +1140,7 @@ def test_full_test_needed_when_scripts_changes(files: tuple[str, ...], expected_ 
"separate-test-types-list-as-string": "API Always BranchExternalPython BranchPythonVenv " "CLI Core ExternalPython Operators Other PlainAsserts " + LIST_OF_ALL_PROVIDER_TESTS - + " PythonVenv Serialization WWW", + + " PythonVenv Serialization TaskSDK WWW", "needs-mypy": "true", "mypy-folders": "['airflow', 'providers', 'docs', 'dev']", }, @@ -1171,10 +1171,10 @@ def test_full_test_needed_when_scripts_changes(files: tuple[str, ...], expected_ "upgrade-to-newer-dependencies": "false", "parallel-test-types-list-as-string": "API Always BranchExternalPython " "BranchPythonVenv CLI Core ExternalPython Operators Other PlainAsserts " - "PythonVenv Serialization WWW", + "PythonVenv Serialization TaskSDK WWW", "separate-test-types-list-as-string": "API Always BranchExternalPython " "BranchPythonVenv CLI Core ExternalPython Operators Other PlainAsserts " - "PythonVenv Serialization WWW", + "PythonVenv Serialization TaskSDK WWW", "needs-mypy": "true", "mypy-folders": "['airflow', 'docs', 'dev']", }, @@ -1300,7 +1300,7 @@ def test_expected_output_full_tests_needed( "upgrade-to-newer-dependencies": "false", "skip-provider-tests": "true", "parallel-test-types-list-as-string": "API Always BranchExternalPython BranchPythonVenv " - "CLI Core ExternalPython Operators Other PlainAsserts PythonVenv Serialization WWW", + "CLI Core ExternalPython Operators Other PlainAsserts PythonVenv Serialization TaskSDK WWW", "needs-mypy": "true", "mypy-folders": "['airflow']", }, @@ -1536,7 +1536,7 @@ def test_expected_output_pull_request_target( "docs-list-as-string": "apache-airflow docker-stack", "upgrade-to-newer-dependencies": "true", "parallel-test-types-list-as-string": "API Always BranchExternalPython BranchPythonVenv " - "CLI Core ExternalPython Operators Other PlainAsserts PythonVenv Serialization WWW", + "CLI Core ExternalPython Operators Other PlainAsserts PythonVenv Serialization TaskSDK WWW", "needs-mypy": "true", "mypy-folders": "['airflow', 'docs', 'dev']", }, diff --git a/pyproject.toml b/pyproject.toml index 844e1b6d3ffa6..6fafa7e99b06c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -342,7 +342,7 @@ section-order = [ # Make sure we put the "dev" imports at the end, not as a third-party module [tool.ruff.lint.isort.sections] -testing = ["dev", "providers.tests"] +testing = ["dev", "providers.tests", "task_sdk.tests"] [tool.ruff.lint.extend-per-file-ignores] "airflow/__init__.py" = ["F401", "TCH004"] @@ -626,12 +626,14 @@ ignore_errors = true [tool.uv] dev-dependencies = [ "local-providers", + "apache-airflow-task-sdk" ] [tool.uv.sources] # These names must match the names as defined in the pyproject.toml of the workspace items, # *not* the workspace folder paths local-providers = { workspace = true } +apache-airflow-task-sdk = { workspace = true } [tool.uv.workspace] -members = ["providers"] +members = ["providers", "task_sdk"] diff --git a/scripts/ci/docker-compose/local.yml b/scripts/ci/docker-compose/local.yml index 66a4736c5fd8d..f0bce46894164 100644 --- a/scripts/ci/docker-compose/local.yml +++ b/scripts/ci/docker-compose/local.yml @@ -88,6 +88,9 @@ services: - type: bind source: ../../../providers target: /opt/airflow/providers + - type: bind + source: ../../../task_sdk + target: /opt/airflow/task_sdk - type: bind source: ../../../pyproject.toml target: /opt/airflow/pyproject.toml diff --git a/scripts/ci/kubernetes/k8s_requirements.txt b/scripts/ci/kubernetes/k8s_requirements.txt index 1e587051f5845..e04ef56412794 100644 --- a/scripts/ci/kubernetes/k8s_requirements.txt +++ 
b/scripts/ci/kubernetes/k8s_requirements.txt @@ -1,3 +1,4 @@ --constraint https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.9.txt -e .[devel-devscripts,devel-tests,cncf.kubernetes] -e ./providers +-e ./task_sdk diff --git a/scripts/docker/install_airflow.sh b/scripts/docker/install_airflow.sh index 324bca7ccfe09..769279c2cb0f4 100644 --- a/scripts/docker/install_airflow.sh +++ b/scripts/docker/install_airflow.sh @@ -48,13 +48,18 @@ function install_airflow() { # Determine the installation_command_flags based on AIRFLOW_INSTALLATION_METHOD method local installation_command_flags if [[ ${AIRFLOW_INSTALLATION_METHOD} == "." ]]; then - # We need _a _ file in there otherwise the editable install doesn't include anything in the .pth file + # We need _a_ file in there otherwise the editable install doesn't include anything in the .pth file mkdir -p ./providers/src/airflow/providers/ touch ./providers/src/airflow/providers/__init__.py - trap 'rm -f ./providers/src/airflow/providers/__init__.py 2>/dev/null' EXIT + + # Similarly we need _a_ file for task_sdk too + mkdir -p ./task_sdk/src/airflow/sdk/ + touch ./task_sdk/src/airflow/__init__.py + + trap 'rm -f ./providers/src/airflow/providers/__init__.py ./task_sdk/src/airflow/__init__.py 2>/dev/null' EXIT # When installing from sources - we always use `--editable` mode - installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./providers" + installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./providers --editable ./task_sdk" elif [[ ${AIRFLOW_INSTALLATION_METHOD} == "apache-airflow" ]]; then installation_command_flags="apache-airflow[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" elif [[ ${AIRFLOW_INSTALLATION_METHOD} == apache-airflow\ @\ * ]]; then diff --git a/task_sdk/README.md b/task_sdk/README.md new file mode 100644 index 0000000000000..ef14affc68c62 --- /dev/null +++ b/task_sdk/README.md @@ -0,0 +1,18 @@ + diff --git a/task_sdk/pyproject.toml b/task_sdk/pyproject.toml new file mode 100644 index 0000000000000..149a9731ce994 --- /dev/null +++ b/task_sdk/pyproject.toml @@ -0,0 +1,40 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
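The editable-install tweak in `install_airflow.sh` above hinges on one detail: `pip install --editable` only records a path for a package directory that contains at least one file (otherwise nothing lands in the generated `.pth` file), which is why the script touches a placeholder `__init__.py` and removes it again in the `EXIT` trap. A rough Python equivalent of that shell logic, shown only as an illustration, with the same path the script uses and `atexit` standing in for the trap:

```python
import atexit
from pathlib import Path

# An editable install needs at least one real file under the package
# directory; the script also pre-creates the deeper src/airflow/sdk/ folder.
placeholder = Path("task_sdk/src/airflow/__init__.py")
placeholder.parent.mkdir(parents=True, exist_ok=True)
placeholder.touch()

# Mirror of the shell `trap ... EXIT`: remove the placeholder on exit.
atexit.register(lambda: placeholder.unlink(missing_ok=True))
```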
+ +[project] +name = "apache-airflow-task-sdk" +version = "0.1.0.dev0" +description = "Python Task SDK for Apache Airflow DAG Authors" +#readme = "README.md" +requires-python = ">=3.9, <3.13" +dependencies = [] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["src/airflow"] + +[tool.ruff] +extend = "../pyproject.toml" +src = ["src"] + +[tool.ruff.lint.per-file-ignores] + +# Ignore Doc rules et al for anything outside of tests +"!src/*" = ["D", "TID253", "S101", "TRY002"] diff --git a/task_sdk/src/airflow/sdk/__init__.py b/task_sdk/src/airflow/sdk/__init__.py new file mode 100644 index 0000000000000..2a3e01b64bc4b --- /dev/null +++ b/task_sdk/src/airflow/sdk/__init__.py @@ -0,0 +1,21 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + + +def hello() -> str: + return "Hello from task-sdk!" diff --git a/task_sdk/src/airflow/sdk/py.typed b/task_sdk/src/airflow/sdk/py.typed new file mode 100644 index 0000000000000..445548d86368f --- /dev/null +++ b/task_sdk/src/airflow/sdk/py.typed @@ -0,0 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# Marker file for PEP 561. This package uses inline types. diff --git a/task_sdk/tests/__init__.py b/task_sdk/tests/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/task_sdk/tests/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/task_sdk/tests/conftest.py b/task_sdk/tests/conftest.py new file mode 100644 index 0000000000000..209fad46d7d62 --- /dev/null +++ b/task_sdk/tests/conftest.py @@ -0,0 +1,26 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import pytest + +pytest_plugins = "dev.tests_common.pytest_plugin" + + +@pytest.hookimpl(tryfirst=True) +def pytest_configure(config: pytest.Config) -> None: + config.inicfg["airflow_deprecations_ignore"] = () diff --git a/task_sdk/tests/test_hello.py b/task_sdk/tests/test_hello.py new file mode 100644 index 0000000000000..62cfdc069ca05 --- /dev/null +++ b/task_sdk/tests/test_hello.py @@ -0,0 +1,23 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from airflow.sdk import hello + + +def test_hello(): + assert hello() == "Hello from task-sdk!" From 8ee9d01e1c8102d18f7ca5c59d38c91fdf72b70e Mon Sep 17 00:00:00 2001 From: Kaxil Naik Date: Sat, 12 Oct 2024 17:33:58 +0100 Subject: [PATCH 090/125] Upgrade Trove classifier to `2024.10.12` (#42961) Test was failing: https://github.com/apache/airflow/actions/runs/11306055404/job/31446236535 ``` pre-commit hook(s) made changes. If you are seeing this message in CI, reproduce locally with: `pre-commit run --all-files`. To run `pre-commit` as part of git workflow, use `pre-commit install`. 
All changes made by hooks: diff --git a/pyproject.toml b/pyproject.toml index 844e1b6..5e9f587 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ requires = [ "pluggy==1.5.0", "smmap==5.0.1", "tomli==2.0.2; python_version < '3.11'", - "trove-classifiers==2024.10.11", + "trove-classifiers==2024.10.12", ] build-backend = "hatchling.build" ``` --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 6fafa7e99b06c..6edf4e5c71175 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ requires = [ "pluggy==1.5.0", "smmap==5.0.1", "tomli==2.0.2; python_version < '3.11'", - "trove-classifiers==2024.10.11", + "trove-classifiers==2024.10.12", ] build-backend = "hatchling.build" From 48c7b22fc82f37d94490036d5cdffb27454261f4 Mon Sep 17 00:00:00 2001 From: Kaxil Naik Date: Sat, 12 Oct 2024 22:55:26 +0100 Subject: [PATCH 091/125] Add missed brackets for our dev script for Spell checks (#42965) A nit PR while I was looking at our doc builds --- dev/breeze/src/airflow_breeze/utils/spelling_checks.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/breeze/src/airflow_breeze/utils/spelling_checks.py b/dev/breeze/src/airflow_breeze/utils/spelling_checks.py index 1dd9d7ec8cb2b..ee4c37e338f44 100644 --- a/dev/breeze/src/airflow_breeze/utils/spelling_checks.py +++ b/dev/breeze/src/airflow_breeze/utils/spelling_checks.py @@ -167,9 +167,9 @@ def display_spelling_error_summary(spelling_errors: dict[str, list[SpellingError """ console.print(msg) console.print() - console.print + console.print() console.print("[red]" + "#" * 30 + " End docs build errors summary " + "#" * 30 + "[/]") - console.print + console.print() def _display_error(error: SpellingError): From 95c46ec135349c8e8d3150d16f18ab65f8240f3e Mon Sep 17 00:00:00 2001 From: David Blain Date: Sun, 13 Oct 2024 00:17:37 +0200 Subject: [PATCH 092/125] FIX: Don't raise a warning in ExecutorSafeguard when execute is called from an extended operator (#42849) * refactor: Don't raise a warning when execute is called from an extended operator, as this should always be allowed. 
* refactored: Fixed import of test_utils in test_dag_run --------- Co-authored-by: David Blain --- airflow/models/baseoperator.py | 11 ++++++++++- tests/models/test_baseoperatormeta.py | 24 +++++++++++++++++++++++- 2 files changed, 33 insertions(+), 2 deletions(-) diff --git a/airflow/models/baseoperator.py b/airflow/models/baseoperator.py index 7b5c8e7c4f8c4..0c3d119be1983 100644 --- a/airflow/models/baseoperator.py +++ b/airflow/models/baseoperator.py @@ -34,6 +34,7 @@ import warnings from datetime import datetime, timedelta from functools import total_ordering, wraps +from threading import local from types import FunctionType from typing import ( TYPE_CHECKING, @@ -392,6 +393,8 @@ class ExecutorSafeguard: """ test_mode = conf.getboolean("core", "unit_test_mode") + _sentinel = local() + _sentinel.callers = {} @classmethod def decorator(cls, func): @@ -399,7 +402,13 @@ def decorator(cls, func): def wrapper(self, *args, **kwargs): from airflow.decorators.base import DecoratedOperator - sentinel = kwargs.pop(f"{self.__class__.__name__}__sentinel", None) + sentinel_key = f"{self.__class__.__name__}__sentinel" + sentinel = kwargs.pop(sentinel_key, None) + + if sentinel: + cls._sentinel.callers[sentinel_key] = sentinel + else: + sentinel = cls._sentinel.callers.pop(f"{func.__qualname__.split('.')[0]}__sentinel", None) if not cls.test_mode and not sentinel == _sentinel and not isinstance(self, DecoratedOperator): message = f"{self.__class__.__name__}.{func.__name__} cannot be called outside TaskInstance!" diff --git a/tests/models/test_baseoperatormeta.py b/tests/models/test_baseoperatormeta.py index 6c6567b23899e..5244e86b2c386 100644 --- a/tests/models/test_baseoperatormeta.py +++ b/tests/models/test_baseoperatormeta.py @@ -40,6 +40,11 @@ def execute(self, context: Context) -> Any: return f"Hello {self.owner}!" 
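The sentinel bookkeeping in the `ExecutorSafeguard` hunk above is easier to follow in isolation. The sketch below is not the Airflow implementation, just a minimal illustration of the idea under simplified assumptions: the trusted runner passes a sentinel keyword argument, and thread-local state keeps nested `super().execute()` calls from subclasses trusted as well, which is exactly the case the `ExtendedHelloWorldOperator` test added just below exercises:

```python
import threading
from functools import wraps

_guard = threading.local()
_SENTINEL = object()


def executor_safeguard(func):
    """Warn when execute() is reached without the runner's sentinel (sketch only)."""

    @wraps(func)
    def wrapper(self, *args, **kwargs):
        sentinel = kwargs.pop("__sentinel", None)
        depth = getattr(_guard, "depth", 0)
        if sentinel is not _SENTINEL and depth == 0:
            print(f"warning: {type(self).__name__}.{func.__name__} called outside TaskInstance!")
        _guard.depth = depth + 1  # remember that we are inside a trusted call chain
        try:
            return func(self, *args, **kwargs)
        finally:
            _guard.depth = depth

    return wrapper


class Hello:
    @executor_safeguard
    def execute(self):
        return "hello"


class ExtendedHello(Hello):
    @executor_safeguard
    def execute(self):
        return super().execute()  # depth > 0 here, so no spurious warning


ExtendedHello().execute(__sentinel=_SENTINEL)  # trusted entry point: silent
ExtendedHello().execute()  # direct call: warns exactly once
```

Keeping the state in `threading.local()` rather than a plain module global means concurrently executing task threads cannot unlock each other's guards, which mirrors why the real fix stores its sentinel map in `local()`.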
+class ExtendedHelloWorldOperator(HelloWorldOperator): + def execute(self, context: Context) -> Any: + return super().execute(context) + + class TestExecutorSafeguard: def setup_method(self): ExecutorSafeguard.test_mode = False @@ -49,12 +54,29 @@ def teardown_method(self, method): @pytest.mark.skip_if_database_isolation_mode # Does not work in db isolation mode @pytest.mark.db_test - def test_executor_when_classic_operator_called_from_dag(self, dag_maker): + @patch.object(HelloWorldOperator, "log") + def test_executor_when_classic_operator_called_from_dag(self, mock_log, dag_maker): with dag_maker() as dag: HelloWorldOperator(task_id="hello_operator") dag_run = dag.test() assert dag_run.state == DagRunState.SUCCESS + mock_log.warning.assert_not_called() + + @pytest.mark.skip_if_database_isolation_mode # Does not work in db isolation mode + @pytest.mark.db_test + @patch.object(HelloWorldOperator, "log") + def test_executor_when_extended_classic_operator_called_from_dag( + self, + mock_log, + dag_maker, + ): + with dag_maker() as dag: + ExtendedHelloWorldOperator(task_id="hello_operator") + + dag_run = dag.test() + assert dag_run.state == DagRunState.SUCCESS + mock_log.warning.assert_not_called() @pytest.mark.skip_if_database_isolation_mode # Does not work in db isolation mode @pytest.mark.parametrize( From 3661a089b2d620faeee62b908dae4450b546aba2 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Sun, 13 Oct 2024 02:31:51 +0200 Subject: [PATCH 093/125] Upgrade Helm Chart dependencies to latest released (#42816) From 8b1c57a19992acf64c294626bdb0244f82a1e693 Mon Sep 17 00:00:00 2001 From: LIU ZHE YOU <68415893+jason810496@users.noreply.github.com> Date: Sun, 13 Oct 2024 09:55:16 +0800 Subject: [PATCH 094/125] Fix PythonOperator DAG error when DAG has hyphen in name (#42902) --- airflow/utils/file.py | 2 +- tests/utils/test_file.py | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/airflow/utils/file.py b/airflow/utils/file.py index 2e39eb7dd7b52..86b7a7891ca8f 100644 --- a/airflow/utils/file.py +++ b/airflow/utils/file.py @@ -355,6 +355,6 @@ def get_unique_dag_module_name(file_path: str) -> str: """Return a unique module name in the format unusual_prefix_{sha1 of module's file path}_{original module name}.""" if isinstance(file_path, str): path_hash = hashlib.sha1(file_path.encode("utf-8")).hexdigest() - org_mod_name = Path(file_path).stem + org_mod_name = re2.sub(r"[.-]", "_", Path(file_path).stem) return MODIFIED_DAG_MODULE_NAME.format(path_hash=path_hash, module_name=org_mod_name) raise ValueError("file_path should be a string to generate unique module name") diff --git a/tests/utils/test_file.py b/tests/utils/test_file.py index 2ffb89ec34196..9424b90a92cf4 100644 --- a/tests/utils/test_file.py +++ b/tests/utils/test_file.py @@ -212,3 +212,21 @@ def test_get_modules_from_invalid_file(self): modules = list(file_utils.iter_airflow_imports(file_path)) assert len(modules) == 0 + + +@pytest.mark.parametrize( + "edge_filename, expected_modification", + [ + ("test_dag.py", "unusual_prefix_mocked_path_hash_sha1_test_dag"), + ("test-dag.py", "unusual_prefix_mocked_path_hash_sha1_test_dag"), + ("test-dag-1.py", "unusual_prefix_mocked_path_hash_sha1_test_dag_1"), + ("test-dag_1.py", "unusual_prefix_mocked_path_hash_sha1_test_dag_1"), + ("test-dag.dev.py", "unusual_prefix_mocked_path_hash_sha1_test_dag_dev"), + ("test_dag.prod.py", "unusual_prefix_mocked_path_hash_sha1_test_dag_prod"), + ], +) +def test_get_unique_dag_module_name(edge_filename, 
expected_modification): + with mock.patch("hashlib.sha1") as mocked_sha1: + mocked_sha1.return_value.hexdigest.return_value = "mocked_path_hash_sha1" + modify_module_name = file_utils.get_unique_dag_module_name(edge_filename) + assert modify_module_name == expected_modification From 51f430285e20e1ef43989e3bc123ae97b81c36e4 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Sun, 13 Oct 2024 04:46:16 +0200 Subject: [PATCH 095/125] Fix handling removal of dependencies (#42967) There was a problem with CI image builds when the cache has been disabled - they still used the "pre-cached" main version (unlike PROD builds). This PR synchronizes the behaviour between CI and PROD builds. When removing dependencies, you need to set both: * `disable docker cache` label * increase DEPENDENCIES_EPOCH_NUMBER in `Dockerfile.ci` Comments and documentation in both places have been updated to reflect it. Since the documentation for labels has been updated, part of this PR is to improve the description of the possible labels that could be used during the build. The description grew from a small number of labels to a "wall of text" that was difficult to read. This PR reformats it in the form of a table that makes it far easier to see which actions the maintainer can take and which labels should be set for each of them. --- .../workflows/additional-ci-image-checks.yml | 7 ++ .../workflows/additional-prod-image-tests.yml | 6 + .github/workflows/build-images.yml | 4 + .github/workflows/ci-image-build.yml | 5 + .github/workflows/ci.yml | 6 + .github/workflows/finalize-tests.yml | 5 + .github/workflows/prod-image-build.yml | 6 + .github/workflows/prod-image-extra-checks.yml | 6 + .github/workflows/push-image-cache.yml | 6 + Dockerfile.ci | 5 +- dev/breeze/doc/ci/07_debugging.md | 67 ++++------- .../doc/images/output_ci-image_build.svg | 112 +++++++++--------- .../doc/images/output_ci-image_build.txt | 2 +- .../doc/images/output_prod-image_build.txt | 2 +- .../commands/ci_image_commands.py | 4 + .../commands/ci_image_commands_config.py | 1 + .../commands/common_image_options.py | 7 ++ .../commands/production_image_commands.py | 7 +- .../airflow_breeze/params/build_ci_params.py | 1 - .../params/build_prod_params.py | 5 - .../params/common_build_params.py | 5 +- .../airflow_breeze/utils/selective_checks.py | 4 + dev/breeze/tests/test_selective_checks.py | 20 ++++ 23 files changed, 179 insertions(+), 114 deletions(-) diff --git a/.github/workflows/additional-ci-image-checks.yml b/.github/workflows/additional-ci-image-checks.yml index ae9efdb6b0340..878800324b784 100644 --- a/.github/workflows/additional-ci-image-checks.yml +++ b/.github/workflows/additional-ci-image-checks.yml @@ -64,6 +64,10 @@ on: # yamllint disable-line rule:truthy description: "Docker cache specification to build the image (registry, local, disabled)." required: true type: string + disable-airflow-repo-cache: + description: "Disable airflow repo cache read from main." 
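Every workflow file touched here forwards the new `disable-airflow-repo-cache` input until it becomes the `DISABLE_AIRFLOW_REPO_CACHE` environment variable read by the image-build steps. As a hedged sketch only (hypothetical helper and illustrative registry reference, not the actual Breeze code), an env flag like this typically decides whether any `--cache-from` arguments are emitted at all:

```python
import os


def buildx_cache_args() -> list[str]:
    # Hypothetical helper: the env var names come from the workflows above,
    # but the logic and registry reference below are illustrative only.
    docker_cache = os.environ.get("DOCKER_CACHE", "registry")
    disable_repo_cache = os.environ.get("DISABLE_AIRFLOW_REPO_CACHE", "false") == "true"
    if docker_cache == "disabled" or disable_repo_cache:
        # No --cache-from: layers cached from `main` are never reused, so
        # removed dependencies really disappear from the rebuilt image.
        return []
    return ["--cache-from", "ghcr.io/apache/airflow/main/ci:cache"]


print(buildx_cache_args())
```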
+ required: true + type: string canary-run: description: "Whether this is a canary run (true/false)" required: true @@ -112,6 +116,7 @@ jobs: use-uv: "true" include-success-outputs: ${{ inputs.include-success-outputs }} docker-cache: ${{ inputs.docker-cache }} + disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} if: inputs.branch == 'main' # Check that after earlier cache push, breeze command will build quickly @@ -168,3 +173,5 @@ jobs: # use-uv: "true" # upgrade-to-newer-dependencies: ${{ inputs.upgrade-to-newer-dependencies }} # docker-cache: ${{ inputs.docker-cache }} +# disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} +# diff --git a/.github/workflows/additional-prod-image-tests.yml b/.github/workflows/additional-prod-image-tests.yml index 4c9606e1343e6..5ffd2001e0e26 100644 --- a/.github/workflows/additional-prod-image-tests.yml +++ b/.github/workflows/additional-prod-image-tests.yml @@ -48,6 +48,10 @@ on: # yamllint disable-line rule:truthy description: "Docker cache specification to build the image (registry, local, disabled)." required: true type: string + disable-airflow-repo-cache: + description: "Disable airflow repo cache read from main." + required: true + type: string canary-run: description: "Whether to run the canary run (true/false)" required: true @@ -72,6 +76,7 @@ jobs: chicken-egg-providers: ${{ inputs.chicken-egg-providers }} constraints-branch: ${{ inputs.constraints-branch }} docker-cache: ${{ inputs.docker-cache }} + disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} if: inputs.default-branch == 'main' && inputs.canary-run == 'true' prod-image-extra-checks-release-branch: @@ -89,6 +94,7 @@ jobs: chicken-egg-providers: ${{ inputs.chicken-egg-providers }} constraints-branch: ${{ inputs.constraints-branch }} docker-cache: ${{ inputs.docker-cache }} + disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} if: inputs.default-branch != 'main' && inputs.canary-run == 'true' test-examples-of-prod-image-building: diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 6c6d55d75045e..943b01f8f8916 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -71,6 +71,7 @@ jobs: prod-image-build: ${{ steps.selective-checks.outputs.prod-image-build }} docker-cache: ${{ steps.selective-checks.outputs.docker-cache }} default-branch: ${{ steps.selective-checks.outputs.default-branch }} + disable-airflow-repo-cache: ${{ steps.selective-checks.outputs.disable-airflow-repo-cache }} constraints-branch: ${{ steps.selective-checks.outputs.default-constraints-branch }} runs-on-as-json-default: ${{ steps.selective-checks.outputs.runs-on-as-json-default }} runs-on-as-json-public: ${{ steps.selective-checks.outputs.runs-on-as-json-public }} @@ -210,6 +211,8 @@ jobs: constraints-branch: ${{ needs.build-info.outputs.constraints-branch }} upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} docker-cache: ${{ needs.build-info.outputs.docker-cache }} + disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} + generate-constraints: name: Generate constraints @@ -256,3 +259,4 @@ jobs: upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} chicken-egg-providers: ${{ needs.build-info.outputs.chicken-egg-providers }} docker-cache: ${{ needs.build-info.outputs.docker-cache }} + disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache 
}} diff --git a/.github/workflows/ci-image-build.yml b/.github/workflows/ci-image-build.yml index 1c4b31b55a604..b8e2feac1755f 100644 --- a/.github/workflows/ci-image-build.yml +++ b/.github/workflows/ci-image-build.yml @@ -95,6 +95,10 @@ on: # yamllint disable-line rule:truthy description: "Docker cache specification to build the image (registry, local, disabled)." required: true type: string + disable-airflow-repo-cache: + description: "Disable airflow repo cache read from main." + required: true + type: string jobs: build-ci-images: strategy: @@ -171,6 +175,7 @@ ${{ inputs.do-build == 'true' && inputs.image-tag || '' }}" --python "${{ matrix.python-version }}" --platform "${{ inputs.platform }}" env: DOCKER_CACHE: ${{ inputs.docker-cache }} + DISABLE_AIRFLOW_REPO_CACHE: ${{ inputs.disable-airflow-repo-cache }} INSTALL_MYSQL_CLIENT_TYPE: ${{ inputs.install-mysql-client-type }} UPGRADE_TO_NEWER_DEPENDENCIES: ${{ inputs.upgrade-to-newer-dependencies }} # You can override CONSTRAINTS_GITHUB_REPOSITORY by setting secret in your repo but by default the diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2267154b03a7a..8a9d716cd8421 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -57,6 +57,7 @@ jobs: outputs: image-tag: ${{ github.event.pull_request.head.sha || github.sha }} docker-cache: ${{ steps.selective-checks.outputs.docker-cache }} + disable-airflow-repo-cache: ${{ steps.selective-checks.outputs.disable-airflow-repo-cache }} affected-providers-list-as-string: >- ${{ steps.selective-checks.outputs.affected-providers-list-as-string }} upgrade-to-newer-dependencies: ${{ steps.selective-checks.outputs.upgrade-to-newer-dependencies }} @@ -208,6 +209,7 @@ jobs: upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} docker-cache: ${{ needs.build-info.outputs.docker-cache }} + disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} wait-for-ci-images: timeout-minutes: 120 @@ -264,6 +266,7 @@ jobs: upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} skip-pre-commits: ${{ needs.build-info.outputs.skip-pre-commits }} docker-cache: ${{ needs.build-info.outputs.docker-cache }} + disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} canary-run: ${{ needs.build-info.outputs.canary-run }} latest-versions-only: ${{ needs.build-info.outputs.latest-versions-only }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} @@ -559,6 +562,7 @@ jobs: chicken-egg-providers: ${{ needs.build-info.outputs.chicken-egg-providers }} constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} docker-cache: ${{ needs.build-info.outputs.docker-cache }} + disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} wait-for-prod-images: timeout-minutes: 80 @@ -615,6 +619,7 @@ jobs: upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} chicken-egg-providers: ${{ needs.build-info.outputs.chicken-egg-providers }} docker-cache: ${{ needs.build-info.outputs.docker-cache }} + disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} default-python-version: ${{ needs.build-info.outputs.default-python-version }} canary-run: ${{ needs.build-info.outputs.canary-run }} if: needs.build-info.outputs.prod-image-build == 'true' @@ -670,6 
+675,7 @@ jobs: upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} docker-cache: ${{ needs.build-info.outputs.docker-cache }} + disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} canary-run: ${{ needs.build-info.outputs.canary-run }} notify-slack-failure: diff --git a/.github/workflows/finalize-tests.yml b/.github/workflows/finalize-tests.yml index 8b392ba204664..6fae105e0a646 100644 --- a/.github/workflows/finalize-tests.yml +++ b/.github/workflows/finalize-tests.yml @@ -64,6 +64,10 @@ on: # yamllint disable-line rule:truthy description: "Docker cache specification to build the image (registry, local, disabled)." required: true type: string + disable-airflow-repo-cache: + description: "Disable airflow repo cache read from main." + required: true + type: string include-success-outputs: description: "Whether to include success outputs (true/false)" required: true @@ -148,6 +152,7 @@ jobs: use-uv: "true" include-success-outputs: ${{ inputs.include-success-outputs }} docker-cache: ${{ inputs.docker-cache }} + disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} if: inputs.canary-run == 'true' # push-buildx-cache-to-github-registry-arm: diff --git a/.github/workflows/prod-image-build.yml b/.github/workflows/prod-image-build.yml index 75d9d0054ec78..db80a6ec247ec 100644 --- a/.github/workflows/prod-image-build.yml +++ b/.github/workflows/prod-image-build.yml @@ -114,6 +114,10 @@ on: # yamllint disable-line rule:truthy description: "Docker cache specification to build the image (registry, local, disabled)." required: true type: string + disable-airflow-repo-cache: + description: "Disable airflow repo cache read from main." + required: true + type: string jobs: build-prod-packages: @@ -276,6 +280,7 @@ ${{ inputs.do-build == 'true' && inputs.image-tag || '' }}" env: PUSH: ${{ inputs.push-image }} DOCKER_CACHE: ${{ inputs.docker-cache }} + DISABLE_AIRFLOW_REPO_CACHE: ${{ inputs.disable-airflow-repo-cache }} DEBIAN_VERSION: ${{ inputs.debian-version }} INSTALL_MYSQL_CLIENT_TYPE: ${{ inputs.install-mysql-client-type }} UPGRADE_TO_NEWER_DEPENDENCIES: ${{ inputs.upgrade-to-newer-dependencies }} @@ -291,6 +296,7 @@ ${{ inputs.do-build == 'true' && inputs.image-tag || '' }}" env: PUSH: ${{ inputs.push-image }} DOCKER_CACHE: ${{ inputs.docker-cache }} + DISABLE_AIRFLOW_REPO_CACHE: ${{ inputs.disable-airflow-repo-cache }} DEBIAN_VERSION: ${{ inputs.debian-version }} INSTALL_MYSQL_CLIENT_TYPE: ${{ inputs.install-mysql-client-type }} UPGRADE_TO_NEWER_DEPENDENCIES: ${{ inputs.upgrade-to-newer-dependencies }} diff --git a/.github/workflows/prod-image-extra-checks.yml b/.github/workflows/prod-image-extra-checks.yml index 82d327ba2f16d..bb63faef7b243 100644 --- a/.github/workflows/prod-image-extra-checks.yml +++ b/.github/workflows/prod-image-extra-checks.yml @@ -63,6 +63,10 @@ on: # yamllint disable-line rule:truthy description: "Docker cache specification to build the image (registry, local, disabled)." required: true type: string + disable-airflow-repo-cache: + description: "Disable airflow repo cache read from main." 
+ required: true + type: string jobs: myssql-client-image: uses: ./.github/workflows/prod-image-build.yml @@ -84,6 +88,7 @@ jobs: chicken-egg-providers: ${{ inputs.chicken-egg-providers }} constraints-branch: ${{ inputs.constraints-branch }} docker-cache: ${{ inputs.docker-cache }} + disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} pip-image: uses: ./.github/workflows/prod-image-build.yml @@ -107,3 +112,4 @@ jobs: chicken-egg-providers: ${{ inputs.chicken-egg-providers }} constraints-branch: ${{ inputs.constraints-branch }} docker-cache: ${{ inputs.docker-cache }} + disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} diff --git a/.github/workflows/push-image-cache.yml b/.github/workflows/push-image-cache.yml index 0dc83a3fd66ea..10a33275ad3f3 100644 --- a/.github/workflows/push-image-cache.yml +++ b/.github/workflows/push-image-cache.yml @@ -76,6 +76,10 @@ on: # yamllint disable-line rule:truthy description: "Docker cache specification to build the image (registry, local, disabled)." required: true type: string + disable-airflow-repo-cache: + description: "Disable airflow repo cache read from main." + required: true + type: string jobs: push-ci-image-cache: name: "Push CI ${{ inputs.cache-type }}:${{ matrix.python }} image cache " @@ -100,6 +104,7 @@ jobs: DEFAULT_BRANCH: ${{ inputs.branch }} DEFAULT_CONSTRAINTS_BRANCH: ${{ inputs.constraints-branch }} DOCKER_CACHE: ${{ inputs.docker-cache }} + DISABLE_AIRFLOW_REPO_CACHE: ${{ inputs.disable-airflow-repo-cache }} GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} @@ -162,6 +167,7 @@ jobs: DEFAULT_BRANCH: ${{ inputs.branch }} DEFAULT_CONSTRAINTS_BRANCH: ${{ inputs.constraints-branch }} DOCKER_CACHE: ${{ inputs.docker-cache }} + DISABLE_AIRFLOW_REPO_CACHE: ${{ inputs.disable-airflow-repo-cache }} GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} diff --git a/Dockerfile.ci b/Dockerfile.ci index 9339e9af6d6f8..464e33f147fb1 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -1180,7 +1180,10 @@ SHELL ["/bin/bash", "-o", "pipefail", "-o", "errexit", "-o", "nounset", "-o", "n ARG PYTHON_BASE_IMAGE ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow" -# By increasing this number we can do force build of all dependencies +# By increasing this number we can do force build of all dependencies. +# NOTE! When you want to make sure dependencies are installed from scratch in your PR after removing +# some dependencies, you also need to set "disable image cache" in your PR to make sure the image is +# not built using the "main" version of those dependencies. 
ARG DEPENDENCIES_EPOCH_NUMBER="11" # Make sure noninteractive debian install is used and language variables set diff --git a/dev/breeze/doc/ci/07_debugging.md b/dev/breeze/doc/ci/07_debugging.md index 6e6d46584edfa..9e7173ae84721 100644 --- a/dev/breeze/doc/ci/07_debugging.md +++ b/dev/breeze/doc/ci/07_debugging.md @@ -21,11 +21,11 @@ **Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* -- [Debugging CI Jobs in Github Actions](#debugging-ci-jobs-in-github-actions) +- [Debugging CI Jobs in Github Actions and changing their behaviour](#debugging-ci-jobs-in-github-actions-and-changing-their-behaviour) -# Debugging CI Jobs in Github Actions +# Debugging CI Jobs in Github Actions and changing their behaviour The CI jobs are notoriously difficult to test, because you can only really see results of it when you run them in CI environment, and the @@ -39,49 +39,28 @@ difficulty is that `Build Images` workflow is `pull-request-target` type, which means that it will always run using the `main` version - no matter what is in your Pull Request. -There are several ways how you can debug the CI jobs when you are -maintainer. +There are several ways how you can debug the CI jobs and modify their +behaviour when you are maintainer. + +When you create the PR you can set one of the labels below, also +in some cases, you need to run the PR as coming from the "apache" +repository rather than from your fork. + +You can also apply the label later and rebase the PR or close/reopen +the PR to apply the label to the PR. + +| Action to perform | Label to set | PR from "apache" repo | +|------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------|:---------------------:| +| Run the build with all combinations of all
python, backends, kubernetes etc. on PR,
and run all types of tests for all test
groups. | full tests needed | | +| Force to use public runners for the build | use public runners | | +| Debug resources used during the build for
parallel jobs | debug ci resources | | +| Force running PR on latest versions of
python, backends, kubernetes etc. when you
want to save resources and test only latest
versions | latest versions only | | +| Force running PR on minimal (default)
versions of python, backends, kubernetes etc.
in order to save resources and run tests only
for minimum versions | default versions only | | +| Make sure to clean dependency cache
usually when removing dependencies.
You also need to increase
`DEPENDENCIES_EPOCH_NUMBER` in `Dockerfile.ci` | disable image cache | | +| Change build images workflows, breeze code or
scripts that are used during image build
so that the scripts can be modified by the PR
| | Yes | +| Treat your build as a "canary" build - including
updating constraints and pushing "main"
documentation. | | Yes | +| Remove any behaviour specific to committers
such as using different runners by default. | non committer build | | -- When you want to tests the build with all combinations of all python, - backends etc on regular PR, add `full tests needed` label to the PR. -- When you want to test maintainer PR using public runners, add - `public runners` label to the PR -- When you want to see resources used by the run, add - `debug ci resources` label to the PR -- When you want to test changes to breeze that include changes to how - images are build you should push your PR to `apache` repository not to - your fork. This will run the images as part of the `CI` workflow - rather than using `Build images` workflow and use the same breeze - version for building image and testing -- When you want to test changes to workflows and CI scripts you can set - `all versions` label to the PR or `latest versions only`. - This will make the PR run using "all" versions of - Python, Kubernetes and the DBS. By default - unless you also change - dependencies in `pyproject.toml` or `generated/provider_dependencies.json` - such PRs will only use "default" versions of Python, Kubernetes and - DBs. This is useful when you want to test changes to the CI scripts - are not affected by the versions of Python, Kubernetes and DBs. -- Even if you change dependencies in `pyproject.toml`, or - `generated/provider_dependencies.json`, when you want to test changes to workflows - and CI scripts you can set `default versions only` label to the - This will make the PR run using the default (or latest) versions of - Python and Kubernetes and DBs. This is useful when you want to test - changes to the CI scripts and workflows and you want to use far - less resources than the full tests. -- When you want to test changes to `build-images.yml` workflow you - should push your branch as `main` branch in your local fork. This will - run changed `build-images.yml` workflow as it will be in `main` branch - of your fork -- When you are a committer and you change build images workflow, together - with build scripts, your build might fail because your scripts are used - in `build-images.yml` workflow, but the workflow is run using the `main` - version. Setting `non committer build` label will make your PR run using - the main version of the scripts and the workflow -- When you are a committer want to test how changes in your workflow affect - `canary` run, as maintainer, you should push your PR to `apache` repository - not to your fork and set `canary` label to the PR -- When you are a committer and want to test if the tests are passing if the - image is freshly built without cache, you can set `disable image cache` label. ----- diff --git a/dev/breeze/doc/images/output_ci-image_build.svg b/dev/breeze/doc/images/output_ci-image_build.svg index 6dd856c3dc8db..62339f7053924 100644 --- a/dev/breeze/doc/images/output_ci-image_build.svg +++ b/dev/breeze/doc/images/output_ci-image_build.svg @@ -1,4 +1,4 @@ - +
[SVG screenshot diff body omitted: the markup of this regenerated terminal-help image did not survive extraction. The recoverable change is that the CI image build help now lists a `--disable-airflow-repo-cache` flag ("Disable cache from Airflow repository during building.") among the advanced build options; the remaining option panels (constraint location, dependencies and extras, backtracking, cache and push, GitHub authentication, common options) are unchanged apart from re-rendering.]
diff --git a/dev/breeze/doc/images/output_ci-image_build.txt b/dev/breeze/doc/images/output_ci-image_build.txt
index 905847bedc783..a53f9ce997325 100644
--- a/dev/breeze/doc/images/output_ci-image_build.txt
+++ b/dev/breeze/doc/images/output_ci-image_build.txt
@@ -1 +1 @@
-686950c27e41fa50cf22abb8c25e496a
+b03ca6dcb44ef05f9c27f72318ff9500
diff --git a/dev/breeze/doc/images/output_prod-image_build.txt b/dev/breeze/doc/images/output_prod-image_build.txt
index b8131522f128e..7799e6f009ebf 100644
--- a/dev/breeze/doc/images/output_prod-image_build.txt
+++ b/dev/breeze/doc/images/output_prod-image_build.txt
@@ -1 +1 @@
-3d6fee0b510d006927e45a56aa45d505
+d0214e8e95fcb56c91e0e416690eb24f
diff --git a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py
index cc6274775f4dd..3c49e530fcec2 100644
--- a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py
@@ -41,6 +41,7 @@
     option_debian_version,
     option_dev_apt_command,
     option_dev_apt_deps,
+    option_disable_airflow_repo_cache,
     option_docker_cache,
     option_image_tag_for_building,
     option_image_tag_for_pulling,
@@ -296,6 +297,7 @@ def get_exitcode(status: int) -> int:
 @option_debug_resources
 @option_dev_apt_command
 @option_dev_apt_deps
+@option_disable_airflow_repo_cache
 @option_docker_cache
 @option_docker_host
 @option_dry_run
@@ -339,6 +341,7 @@ def build(
     debug_resources: bool,
     dev_apt_command: str | None,
     dev_apt_deps: str | None,
+    disable_airflow_repo_cache: bool,
     docker_cache: str,
     docker_host: str | None,
     eager_upgrade_additional_requirements: str | None,
@@ -413,6 +416,7 @@ def run_build(ci_image_params: BuildCiParams) -> None:
         debian_version=debian_version,
         dev_apt_command=dev_apt_command,
         dev_apt_deps=dev_apt_deps,
+        disable_airflow_repo_cache=disable_airflow_repo_cache,
         docker_cache=docker_cache,
         docker_host=docker_host,
         eager_upgrade_additional_requirements=eager_upgrade_additional_requirements,
diff --git a/dev/breeze/src/airflow_breeze/commands/ci_image_commands_config.py b/dev/breeze/src/airflow_breeze/commands/ci_image_commands_config.py
index 2c5d4a4068ed6..0fe458c9d6ad2 100644
--- a/dev/breeze/src/airflow_breeze/commands/ci_image_commands_config.py
+++ b/dev/breeze/src/airflow_breeze/commands/ci_image_commands_config.py
@@ -57,6 +57,7 @@
         "--additional-pip-install-flags",
         "--commit-sha",
         "--debian-version",
+        "--disable-airflow-repo-cache",
         "--install-mysql-client-type",
         "--python-image",
         "--use-uv",
diff --git a/dev/breeze/src/airflow_breeze/commands/common_image_options.py b/dev/breeze/src/airflow_breeze/commands/common_image_options.py
index 1b8f9460949e3..e80b2f284d266 100644
--- a/dev/breeze/src/airflow_breeze/commands/common_image_options.py
+++ b/dev/breeze/src/airflow_breeze/commands/common_image_options.py
@@ -108,6 +108,12 @@
     help="Apt dev dependencies to use when building the images.",
     envvar="DEV_APT_DEPS",
 )
+option_disable_airflow_repo_cache = click.option(
+    "--disable-airflow-repo-cache",
+    help="Disable cache from Airflow repository during building.",
+    is_flag=True,
+    envvar="DISABLE_AIRFLOW_REPO_CACHE",
+)
 option_docker_cache = click.option(
     "-c",
     "--docker-cache",
@@ -115,6 +121,7 @@
     default=ALLOWED_BUILD_CACHE[0],
     show_default=True,
     type=BetterChoice(ALLOWED_BUILD_CACHE),
+    envvar="DOCKER_CACHE",
 )
 option_image_tag_for_pulling = click.option(
     "-t",
diff --git 
a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py index 6e21aabffdff5..d266dd027bdc0 100644 --- a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py @@ -36,6 +36,7 @@ option_debian_version, option_dev_apt_command, option_dev_apt_deps, + option_disable_airflow_repo_cache, option_docker_cache, option_image_tag_for_building, option_image_tag_for_pulling, @@ -192,11 +193,6 @@ def prod_image(): @click.option("--disable-mysql-client-installation", help="Do not install MySQL client.", is_flag=True) @click.option("--disable-mssql-client-installation", help="Do not install MsSQl client.", is_flag=True) @click.option("--disable-postgres-client-installation", help="Do not install Postgres client.", is_flag=True) -@click.option( - "--disable-airflow-repo-cache", - help="Disable cache from Airflow repository during building.", - is_flag=True, -) @click.option( "--install-airflow-reference", help="Install Airflow using GitHub tag or branch.", @@ -222,6 +218,7 @@ def prod_image(): @option_debug_resources @option_dev_apt_command @option_dev_apt_deps +@option_disable_airflow_repo_cache @option_docker_cache @option_docker_host @option_dry_run diff --git a/dev/breeze/src/airflow_breeze/params/build_ci_params.py b/dev/breeze/src/airflow_breeze/params/build_ci_params.py index 05179df07b8c4..c38776a3f6144 100644 --- a/dev/breeze/src/airflow_breeze/params/build_ci_params.py +++ b/dev/breeze/src/airflow_breeze/params/build_ci_params.py @@ -34,7 +34,6 @@ class BuildCiParams(CommonBuildParams): airflow_constraints_mode: str = "constraints-source-providers" airflow_constraints_reference: str = DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH airflow_extras: str = "devel-ci" - airflow_pre_cached_pip_packages: bool = True force_build: bool = False upgrade_to_newer_dependencies: bool = False upgrade_on_failure: bool = False diff --git a/dev/breeze/src/airflow_breeze/params/build_prod_params.py b/dev/breeze/src/airflow_breeze/params/build_prod_params.py index 2533c30d6f3ae..6fa5828b40fa5 100644 --- a/dev/breeze/src/airflow_breeze/params/build_prod_params.py +++ b/dev/breeze/src/airflow_breeze/params/build_prod_params.py @@ -44,7 +44,6 @@ class BuildProdParams(CommonBuildParams): airflow_constraints_reference: str = DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH cleanup_context: bool = False airflow_extras: str = field(default_factory=get_airflow_extras) - disable_airflow_repo_cache: bool = False disable_mssql_client_installation: bool = False disable_mysql_client_installation: bool = False disable_postgres_client_installation: bool = False @@ -186,10 +185,6 @@ def _extra_prod_docker_build_flags(self) -> list[str]: ) return extra_build_flags - @property - def airflow_pre_cached_pip_packages(self) -> str: - return "false" if self.disable_airflow_repo_cache else "true" - @property def install_mssql_client(self) -> str: return "false" if self.disable_mssql_client_installation else "true" diff --git a/dev/breeze/src/airflow_breeze/params/common_build_params.py b/dev/breeze/src/airflow_breeze/params/common_build_params.py index ce0f2c0063f50..535ba96480536 100644 --- a/dev/breeze/src/airflow_breeze/params/common_build_params.py +++ b/dev/breeze/src/airflow_breeze/params/common_build_params.py @@ -56,6 +56,7 @@ class CommonBuildParams: commit_sha: str | None = None dev_apt_command: str | None = None dev_apt_deps: str | None = None + disable_airflow_repo_cache: bool = False 
docker_cache: str = "registry" docker_host: str | None = os.environ.get("DOCKER_HOST") github_actions: str = os.environ.get("GITHUB_ACTIONS", "false") @@ -89,8 +90,8 @@ def image_type(self) -> str: raise NotImplementedError() @property - def airflow_pre_cached_pip_packages(self): - raise NotImplementedError() + def airflow_pre_cached_pip_packages(self) -> str: + return "false" if self.disable_airflow_repo_cache else "true" @property def airflow_base_image_name(self): diff --git a/dev/breeze/src/airflow_breeze/utils/selective_checks.py b/dev/breeze/src/airflow_breeze/utils/selective_checks.py index 0cb69a194713d..20bf514379f0b 100644 --- a/dev/breeze/src/airflow_breeze/utils/selective_checks.py +++ b/dev/breeze/src/airflow_breeze/utils/selective_checks.py @@ -1132,6 +1132,10 @@ def docker_cache(self) -> str: def debug_resources(self) -> bool: return DEBUG_CI_RESOURCES_LABEL in self._pr_labels + @cached_property + def disable_airflow_repo_cache(self) -> bool: + return self.docker_cache == "disabled" + @cached_property def helm_test_packages(self) -> str: return json.dumps(all_helm_test_packages()) diff --git a/dev/breeze/tests/test_selective_checks.py b/dev/breeze/tests/test_selective_checks.py index 6d031f70b875a..619dc8eec2f99 100644 --- a/dev/breeze/tests/test_selective_checks.py +++ b/dev/breeze/tests/test_selective_checks.py @@ -2308,6 +2308,26 @@ def test_mypy_matches( ("non committer build",), id="Committer regular PR - forcing non-committer build", ), + pytest.param( + ("README.md",), + { + "docker-cache": "disabled", + "disable-airflow-repo-cache": "true", + }, + "potiuk", + ("disable image cache",), + id="Disabled cache", + ), + pytest.param( + ("README.md",), + { + "docker-cache": "registry", + "disable-airflow-repo-cache": "false", + }, + "potiuk", + (), + id="Standard cache", + ), ], ) def test_pr_labels( From f1735b4c0c4743ccd747a60c88310d6ef45bbdc7 Mon Sep 17 00:00:00 2001 From: GPK Date: Sun, 13 Oct 2024 05:36:27 +0100 Subject: [PATCH 096/125] mark test_task_workflow_trigger_success as flaky (#42972) --- tests/triggers/test_external_task.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/triggers/test_external_task.py b/tests/triggers/test_external_task.py index ced867c4bd6aa..202eafb0b9e3d 100644 --- a/tests/triggers/test_external_task.py +++ b/tests/triggers/test_external_task.py @@ -36,6 +36,7 @@ class TestWorkflowTrigger: RUN_ID = "external_task_run_id" STATES = ["success", "fail"] + @pytest.mark.flaky(reruns=5) @mock.patch("airflow.triggers.external_task._get_count") @pytest.mark.asyncio async def test_task_workflow_trigger_success(self, mock_get_count): From cd5eb2ca4abc67ebc85fcaa44d12f3343d03fafa Mon Sep 17 00:00:00 2001 From: Aryan Khurana <101019909+AryanK1511@users.noreply.github.com> Date: Sun, 13 Oct 2024 06:34:02 -0400 Subject: [PATCH 097/125] Create a User Settings button with light/dark mode toggle as a menu item (#42964) --- airflow/ui/src/layouts/Nav/Nav.tsx | 16 +---- .../ui/src/layouts/Nav/UserSettingsButton.tsx | 58 +++++++++++++++++++ 2 files changed, 60 insertions(+), 14 deletions(-) create mode 100644 airflow/ui/src/layouts/Nav/UserSettingsButton.tsx diff --git a/airflow/ui/src/layouts/Nav/Nav.tsx b/airflow/ui/src/layouts/Nav/Nav.tsx index 9886b5eb75760..3b47595e8c173 100644 --- a/airflow/ui/src/layouts/Nav/Nav.tsx +++ b/airflow/ui/src/layouts/Nav/Nav.tsx @@ -21,7 +21,6 @@ import { Flex, Icon, Link, - useColorMode, useColorModeValue, VStack, } from "@chakra-ui/react"; @@ -32,9 +31,7 @@ import { FiDatabase, FiGlobe, FiHome, - FiMoon, FiSettings, 
- FiSun, } from "react-icons/fi"; import { AirflowPin } from "src/assets/AirflowPin"; @@ -42,9 +39,9 @@ import { DagIcon } from "src/assets/DagIcon"; import { DocsButton } from "./DocsButton"; import { NavButton } from "./NavButton"; +import { UserSettingsButton } from "./UserSettingsButton"; export const Nav = () => { - const { colorMode, toggleColorMode } = useColorMode(); const navBg = useColorModeValue("blue.100", "blue.900"); return ( @@ -106,16 +103,7 @@ export const Nav = () => { title="Return to legacy UI" /> - - ) : ( - - ) - } - onClick={toggleColorMode} - /> + ); diff --git a/airflow/ui/src/layouts/Nav/UserSettingsButton.tsx b/airflow/ui/src/layouts/Nav/UserSettingsButton.tsx new file mode 100644 index 0000000000000..c43c17b6d039b --- /dev/null +++ b/airflow/ui/src/layouts/Nav/UserSettingsButton.tsx @@ -0,0 +1,58 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { + IconButton, + Menu, + MenuButton, + useColorMode, + MenuItem, + MenuList, +} from "@chakra-ui/react"; +import { FiMoon, FiSun, FiUser } from "react-icons/fi"; + +import { navButtonProps } from "./navButtonProps"; + +export const UserSettingsButton = () => { + const { colorMode, toggleColorMode } = useColorMode(); + + return ( + + } + {...navButtonProps} + /> + + + {colorMode === "light" ? 
( + <> + + Switch to Dark Mode + + ) : ( + <> + + Switch to Light Mode + + )} + + + + ); +}; From b92c66d45d206c670516f192662bfcf8ad34bec8 Mon Sep 17 00:00:00 2001 From: Kalyan R Date: Sun, 13 Oct 2024 16:05:10 +0530 Subject: [PATCH 098/125] AIP-84 Migrate delete Dag Run endpoint to FastAPI (#42910) * delete dag_run init * delete dag_run init * add delete dag run endpoint * add tests for delete dag run --- .../endpoints/dag_run_endpoint.py | 1 + airflow/api_fastapi/openapi/v1-generated.yaml | 52 +++++++++++++++++++ airflow/api_fastapi/views/public/dag_run.py | 15 ++++++ airflow/ui/openapi-gen/queries/common.ts | 3 ++ airflow/ui/openapi-gen/queries/queries.ts | 43 +++++++++++++++ .../ui/openapi-gen/requests/services.gen.ts | 31 +++++++++++ airflow/ui/openapi-gen/requests/types.gen.ts | 36 +++++++++++++ .../api_fastapi/views/public/test_dag_run.py | 12 +++++ 8 files changed, 193 insertions(+) diff --git a/airflow/api_connexion/endpoints/dag_run_endpoint.py b/airflow/api_connexion/endpoints/dag_run_endpoint.py index a862b7c969503..74eae13ddd4d0 100644 --- a/airflow/api_connexion/endpoints/dag_run_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_run_endpoint.py @@ -78,6 +78,7 @@ from airflow.api_connexion.types import APIResponse +@mark_fastapi_migration_done @security.requires_access_dag("DELETE", DagAccessEntity.RUN) @provide_session @action_logging diff --git a/airflow/api_fastapi/openapi/v1-generated.yaml b/airflow/api_fastapi/openapi/v1-generated.yaml index 7debfbb1008af..6c35ca212a2a6 100644 --- a/airflow/api_fastapi/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/openapi/v1-generated.yaml @@ -679,6 +679,58 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + delete: + tags: + - DagRun + summary: Delete Dag Run + description: Delete a DAG Run entry. 
+ operationId: delete_dag_run + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + responses: + '204': + description: Successful Response + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' components: schemas: ConnectionResponse: diff --git a/airflow/api_fastapi/views/public/dag_run.py b/airflow/api_fastapi/views/public/dag_run.py index d39fb6f2f331c..2f44f03ea6103 100644 --- a/airflow/api_fastapi/views/public/dag_run.py +++ b/airflow/api_fastapi/views/public/dag_run.py @@ -42,3 +42,18 @@ async def get_dag_run( ) return DAGRunResponse.model_validate(dag_run, from_attributes=True) + + +@dag_run_router.delete( + "/{dag_run_id}", status_code=204, responses=create_openapi_http_exception_doc([400, 401, 403, 404]) +) +async def delete_dag_run(dag_id: str, dag_run_id: str, session: Annotated[Session, Depends(get_session)]): + """Delete a DAG Run entry.""" + dag_run = session.scalar(select(DagRun).filter_by(dag_id=dag_id, run_id=dag_run_id)) + + if dag_run is None: + raise HTTPException( + 404, f"The DagRun with dag_id: `{dag_id}` and run_id: `{dag_run_id}` was not found" + ) + + session.delete(dag_run) diff --git a/airflow/ui/openapi-gen/queries/common.ts b/airflow/ui/openapi-gen/queries/common.ts index aaff196c0791d..393fad520a679 100644 --- a/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow/ui/openapi-gen/queries/common.ts @@ -197,3 +197,6 @@ export type ConnectionServiceDeleteConnectionMutationResult = Awaited< export type VariableServiceDeleteVariableMutationResult = Awaited< ReturnType >; +export type DagRunServiceDeleteDagRunMutationResult = Awaited< + ReturnType +>; diff --git a/airflow/ui/openapi-gen/queries/queries.ts b/airflow/ui/openapi-gen/queries/queries.ts index 19bb17b342a84..01e919cce53bf 100644 --- a/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow/ui/openapi-gen/queries/queries.ts @@ -506,3 +506,46 @@ export const useVariableServiceDeleteVariable = < }) as unknown as Promise, ...options, }); +/** + * Delete Dag Run + * Delete a DAG Run entry. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @returns void Successful Response + * @throws ApiError + */ +export const useDagRunServiceDeleteDagRun = < + TData = Common.DagRunServiceDeleteDagRunMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: string; + dagRunId: string; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: string; + dagRunId: string; + }, + TContext + >({ + mutationFn: ({ dagId, dagRunId }) => + DagRunService.deleteDagRun({ + dagId, + dagRunId, + }) as unknown as Promise, + ...options, + }); diff --git a/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow/ui/openapi-gen/requests/services.gen.ts index 9a126aef25fbc..f0cbc099370f8 100644 --- a/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow/ui/openapi-gen/requests/services.gen.ts @@ -27,6 +27,8 @@ import type { GetVariableResponse, GetDagRunData, GetDagRunResponse, + DeleteDagRunData, + DeleteDagRunResponse, } from "./types.gen"; export class AssetService { @@ -391,4 +393,33 @@ export class DagRunService { }, }); } + + /** + * Delete Dag Run + * Delete a DAG Run entry. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @returns void Successful Response + * @throws ApiError + */ + public static deleteDagRun( + data: DeleteDagRunData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "DELETE", + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}", + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + }, + errors: { + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } } diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index 45bfa51aec9c4..5fe3615c7d4e6 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -355,6 +355,13 @@ export type GetDagRunData = { export type GetDagRunResponse = DAGRunResponse; +export type DeleteDagRunData = { + dagId: string; + dagRunId: string; +}; + +export type DeleteDagRunResponse = void; + export type $OpenApiTs = { "/ui/next_run_assets/{dag_id}": { get: { @@ -657,5 +664,34 @@ export type $OpenApiTs = { 422: HTTPValidationError; }; }; + delete: { + req: DeleteDagRunData; + res: { + /** + * Successful Response + */ + 204: void; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; }; diff --git a/tests/api_fastapi/views/public/test_dag_run.py b/tests/api_fastapi/views/public/test_dag_run.py index 176ae07d3fadb..f8d6780e50a3a 100644 --- a/tests/api_fastapi/views/public/test_dag_run.py +++ b/tests/api_fastapi/views/public/test_dag_run.py @@ -136,3 +136,15 @@ def test_get_dag_run_not_found(test_client): assert response.status_code == 404 body = response.json() assert body["detail"] == "The DagRun with dag_id: `test_dag1` and run_id: `invalid` was not found" + + +class TestDeleteDagRun: + def test_delete_dag_run(self, test_client): + response = test_client.delete(f"/public/dags/{DAG1_ID}/dagRuns/{DAG1_RUN1_ID}") + assert response.status_code == 204 + + def test_delete_dag_run_not_found(self, test_client): + response = 
test_client.delete(f"/public/dags/{DAG1_ID}/dagRuns/invalid") + assert response.status_code == 404 + body = response.json() + assert body["detail"] == "The DagRun with dag_id: `test_dag1` and run_id: `invalid` was not found" From ad54501f3fccde549c7699f8ce46c711e38accc0 Mon Sep 17 00:00:00 2001 From: michaeljs-c <72604759+michaeljs-c@users.noreply.github.com> Date: Sun, 13 Oct 2024 13:57:51 +0100 Subject: [PATCH 099/125] Make datStats endpoint dag_ids parameter optional (#42955) * Make datStats dag_id parameter optional with pagination * move pagination out of sql query * tidy --------- Co-authored-by: Michael Smith-Chandler --- .../endpoints/dag_stats_endpoint.py | 40 ++-- airflow/api_connexion/openapi/v1.yaml | 4 +- airflow/www/static/js/types/api-generated.ts | 6 +- .../endpoints/test_dag_stats_endpoint.py | 194 +++++++++++++++++- 4 files changed, 227 insertions(+), 17 deletions(-) diff --git a/airflow/api_connexion/endpoints/dag_stats_endpoint.py b/airflow/api_connexion/endpoints/dag_stats_endpoint.py index 705af10d41d99..3b6c6ab8e0df3 100644 --- a/airflow/api_connexion/endpoints/dag_stats_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_stats_endpoint.py @@ -39,24 +39,40 @@ @security.requires_access_dag("GET", DagAccessEntity.RUN) @provide_session -def get_dag_stats(*, dag_ids: str, session: Session = NEW_SESSION) -> APIResponse: +def get_dag_stats( + *, + dag_ids: str | None = None, + limit: int | None = None, + offset: int | None = None, + session: Session = NEW_SESSION, +) -> APIResponse: """Get Dag statistics.""" allowed_dag_ids = get_auth_manager().get_permitted_dag_ids(methods=["GET"], user=g.user) - dags_list = set(dag_ids.split(",")) - filter_dag_ids = dags_list.intersection(allowed_dag_ids) + if dag_ids: + dags_list = set(dag_ids.split(",")) + filter_dag_ids = dags_list.intersection(allowed_dag_ids) + else: + filter_dag_ids = allowed_dag_ids + query_dag_ids = sorted(list(filter_dag_ids)) + if offset is not None: + query_dag_ids = query_dag_ids[offset:] + if limit is not None: + query_dag_ids = query_dag_ids[:limit] query = ( select(DagRun.dag_id, DagRun.state, func.count(DagRun.state)) .group_by(DagRun.dag_id, DagRun.state) - .where(DagRun.dag_id.in_(filter_dag_ids)) + .where(DagRun.dag_id.in_(query_dag_ids)) ) dag_state_stats = session.execute(query) - dag_state_data = {(dag_id, state): count for dag_id, state, count in dag_state_stats} - dag_stats = { - dag_id: [{"state": state, "count": dag_state_data.get((dag_id, state), 0)} for state in DagRunState] - for dag_id in filter_dag_ids - } - - dags = [{"dag_id": stat, "stats": dag_stats[stat]} for stat in dag_stats] - return dag_stats_collection_schema.dump({"dags": dags, "total_entries": len(dag_stats)}) + dags = [ + { + "dag_id": dag_id, + "stats": [ + {"state": state, "count": dag_state_data.get((dag_id, state), 0)} for state in DagRunState + ], + } + for dag_id in query_dag_ids + ] + return dag_stats_collection_schema.dump({"dags": dags, "total_entries": len(dags)}) diff --git a/airflow/api_connexion/openapi/v1.yaml b/airflow/api_connexion/openapi/v1.yaml index b39d1cd955dd9..e99f91639c49e 100644 --- a/airflow/api_connexion/openapi/v1.yaml +++ b/airflow/api_connexion/openapi/v1.yaml @@ -2384,11 +2384,13 @@ paths: operationId: get_dag_stats tags: [DagStats] parameters: + - $ref: "#/components/parameters/PageLimit" + - $ref: "#/components/parameters/PageOffset" - name: dag_ids in: query schema: type: string - required: true + required: false description: | One or more DAG IDs separated by commas to filter relevant Dags. 
responses: diff --git a/airflow/www/static/js/types/api-generated.ts b/airflow/www/static/js/types/api-generated.ts index 15391c2942432..ef45dbd3b57b6 100644 --- a/airflow/www/static/js/types/api-generated.ts +++ b/airflow/www/static/js/types/api-generated.ts @@ -4962,8 +4962,12 @@ export interface operations { get_dag_stats: { parameters: { query: { + /** The numbers of items to return. */ + limit?: components["parameters"]["PageLimit"]; + /** The number of items to skip before starting to collect the result set. */ + offset?: components["parameters"]["PageOffset"]; /** One or more DAG IDs separated by commas to filter relevant Dags. */ - dag_ids: string; + dag_ids?: string; }; }; responses: { diff --git a/tests/api_connexion/endpoints/test_dag_stats_endpoint.py b/tests/api_connexion/endpoints/test_dag_stats_endpoint.py index a447e2a6a4b23..fe563b9444030 100644 --- a/tests/api_connexion/endpoints/test_dag_stats_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_stats_endpoint.py @@ -76,9 +76,10 @@ def _create_dag(self, dag_id): self.app.dag_bag.bag_dag(dag) return dag_instance - def test_should_respond_200(self, session): + def _create_dag_runs(self, session): self._create_dag("dag_stats_dag") self._create_dag("dag_stats_dag_2") + self._create_dag("dag_stats_dag_3") dag_1_run_1 = DagRun( dag_id="dag_stats_dag", run_id="test_dag_run_id_1", @@ -106,8 +107,20 @@ def test_should_respond_200(self, session): external_trigger=True, state="queued", ) - session.add_all((dag_1_run_1, dag_1_run_2, dag_2_run_1)) + dag_3_run_1 = DagRun( + dag_id="dag_stats_dag_3", + run_id="test_dag_3_run_id_1", + run_type=DagRunType.MANUAL, + execution_date=timezone.parse(self.default_time), + start_date=timezone.parse(self.default_time), + external_trigger=True, + state="success", + ) + session.add_all((dag_1_run_1, dag_1_run_2, dag_2_run_1, dag_3_run_1)) session.commit() + + def test_should_respond_200(self, session): + self._create_dag_runs(session) exp_payload = { "dags": [ { @@ -165,7 +178,182 @@ def test_should_respond_200(self, session): assert sorted(response.json["dags"], key=lambda d: d["dag_id"]) == sorted( exp_payload["dags"], key=lambda d: d["dag_id"] ) - response.json["total_entries"] == 2 + assert response.json["total_entries"] == 2 + + @pytest.mark.parametrize( + "url, exp_payload", + [ + ( + "api/v1/dagStats", + { + "dags": [ + { + "dag_id": "dag_stats_dag", + "stats": [ + { + "state": DagRunState.QUEUED, + "count": 0, + }, + { + "state": DagRunState.RUNNING, + "count": 1, + }, + { + "state": DagRunState.SUCCESS, + "count": 0, + }, + { + "state": DagRunState.FAILED, + "count": 1, + }, + ], + }, + { + "dag_id": "dag_stats_dag_2", + "stats": [ + { + "state": DagRunState.QUEUED, + "count": 1, + }, + { + "state": DagRunState.RUNNING, + "count": 0, + }, + { + "state": DagRunState.SUCCESS, + "count": 0, + }, + { + "state": DagRunState.FAILED, + "count": 0, + }, + ], + }, + { + "dag_id": "dag_stats_dag_3", + "stats": [ + { + "state": DagRunState.QUEUED, + "count": 0, + }, + { + "state": DagRunState.RUNNING, + "count": 0, + }, + { + "state": DagRunState.SUCCESS, + "count": 1, + }, + { + "state": DagRunState.FAILED, + "count": 0, + }, + ], + }, + ], + "total_entries": 3, + }, + ), + ( + "api/v1/dagStats?limit=1", + { + "dags": [ + { + "dag_id": "dag_stats_dag", + "stats": [ + { + "state": DagRunState.QUEUED, + "count": 0, + }, + { + "state": DagRunState.RUNNING, + "count": 1, + }, + { + "state": DagRunState.SUCCESS, + "count": 0, + }, + { + "state": DagRunState.FAILED, + "count": 1, + }, + ], + } + ], + 
"total_entries": 1, + }, + ), + ( + "api/v1/dagStats?offset=2", + { + "dags": [ + { + "dag_id": "dag_stats_dag_3", + "stats": [ + { + "state": DagRunState.QUEUED, + "count": 0, + }, + { + "state": DagRunState.RUNNING, + "count": 0, + }, + { + "state": DagRunState.SUCCESS, + "count": 1, + }, + { + "state": DagRunState.FAILED, + "count": 0, + }, + ], + }, + ], + "total_entries": 1, + }, + ), + ( + "api/v1/dagStats?offset=1&limit=1", + { + "dags": [ + { + "dag_id": "dag_stats_dag_2", + "stats": [ + { + "state": DagRunState.QUEUED, + "count": 1, + }, + { + "state": DagRunState.RUNNING, + "count": 0, + }, + { + "state": DagRunState.SUCCESS, + "count": 0, + }, + { + "state": DagRunState.FAILED, + "count": 0, + }, + ], + }, + ], + "total_entries": 1, + }, + ), + ("api/v1/dagStats?offset=10&limit=1", {"dags": [], "total_entries": 0}), + ], + ) + def test_optional_dag_ids_with_limit_offset(self, url, exp_payload, session): + self._create_dag_runs(session) + + response = self.client.get(url, environ_overrides={"REMOTE_USER": "test"}) + num_dags = len(exp_payload["dags"]) + assert response.status_code == 200 + assert sorted(response.json["dags"], key=lambda d: d["dag_id"]) == sorted( + exp_payload["dags"], key=lambda d: d["dag_id"] + ) + assert response.json["total_entries"] == num_dags def test_should_raises_401_unauthenticated(self): dag_ids = "dag_stats_dag,dag_stats_dag_2" From 43f48baef82dc63f474a8f464dcd2eed09d74a02 Mon Sep 17 00:00:00 2001 From: GPK Date: Sun, 13 Oct 2024 16:12:57 +0100 Subject: [PATCH 100/125] trove classifier upgrade (#42979) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 6edf4e5c71175..160e8e2ce0b0c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ requires = [ "pluggy==1.5.0", "smmap==5.0.1", "tomli==2.0.2; python_version < '3.11'", - "trove-classifiers==2024.10.12", + "trove-classifiers==2024.10.13", ] build-backend = "hatchling.build" From d6a1a522e7f7e456dbfe3ff4d44ff26f99fd085a Mon Sep 17 00:00:00 2001 From: Vincent <97131062+vincbeck@users.noreply.github.com> Date: Sun, 13 Oct 2024 18:06:59 -0400 Subject: [PATCH 101/125] Change directory used by simple auth manager to store generated passwords (#42860) * Change directory used by simple auth manager to store generated passwords * Update airflow/auth/managers/simple/simple_auth_manager.py Co-authored-by: Jens Scheffler <95105677+jscheffl@users.noreply.github.com> --------- Co-authored-by: Jens Scheffler <95105677+jscheffl@users.noreply.github.com> --- Dockerfile.ci | 3 +++ .../managers/simple/simple_auth_manager.py | 20 ++++++++++--------- scripts/docker/entrypoint_ci.sh | 3 +++ .../simple/test_simple_auth_manager.py | 4 ++-- tests/auth/managers/simple/views/test_auth.py | 2 +- 5 files changed, 20 insertions(+), 12 deletions(-) diff --git a/Dockerfile.ci b/Dockerfile.ci index 464e33f147fb1..7e0ee74556f9d 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -914,6 +914,9 @@ function environment_initialization() { # Added to have run-tests on path export PATH=${PATH}:${AIRFLOW_SOURCES} + # Directory where simple auth manager store generated passwords + export AIRFLOW_AUTH_MANAGER_CREDENTIAL_DIRECTORY="/files" + mkdir -pv "${AIRFLOW_HOME}/logs/" # Change the default worker_concurrency for tests diff --git a/airflow/auth/managers/simple/simple_auth_manager.py b/airflow/auth/managers/simple/simple_auth_manager.py index 4a9639a998c46..78dccf7c2a980 100644 --- a/airflow/auth/managers/simple/simple_auth_manager.py +++ 
b/airflow/auth/managers/simple/simple_auth_manager.py @@ -30,7 +30,7 @@ from airflow.auth.managers.base_auth_manager import BaseAuthManager, ResourceMethod from airflow.auth.managers.simple.user import SimpleAuthManagerUser from airflow.auth.managers.simple.views.auth import SimpleAuthManagerAuthenticationViews -from hatch_build import AIRFLOW_ROOT_PATH +from airflow.configuration import AIRFLOW_HOME if TYPE_CHECKING: from airflow.auth.managers.models.base_user import BaseUser @@ -78,20 +78,22 @@ class SimpleAuthManager(BaseAuthManager): :param appbuilder: the flask app builder """ - # File that contains the generated passwords - GENERATED_PASSWORDS_FILE = ( - AIRFLOW_ROOT_PATH / "generated" / "simple_auth_manager_passwords.json.generated" - ) - # Cache containing the password associated to a username passwords: dict[str, str] = {} + @staticmethod + def get_generated_password_file() -> str: + return os.path.join( + os.getenv("AIRFLOW_AUTH_MANAGER_CREDENTIAL_DIRECTORY", AIRFLOW_HOME), + "simple_auth_manager_passwords.json.generated", + ) + def init(self) -> None: user_passwords_from_file = {} # Read passwords from file - if os.path.isfile(self.GENERATED_PASSWORDS_FILE): - with open(self.GENERATED_PASSWORDS_FILE) as file: + if os.path.isfile(self.get_generated_password_file()): + with open(self.get_generated_password_file()) as file: passwords_str = file.read().strip() user_passwords_from_file = json.loads(passwords_str) @@ -109,7 +111,7 @@ def init(self) -> None: self._print_output(f"Password for user '{user['username']}': {self.passwords[user['username']]}") - with open(self.GENERATED_PASSWORDS_FILE, "w") as file: + with open(self.get_generated_password_file(), "w") as file: file.write(json.dumps(self.passwords)) def is_logged_in(self) -> bool: diff --git a/scripts/docker/entrypoint_ci.sh b/scripts/docker/entrypoint_ci.sh index d0946b39e0e28..a96e58b9c21a5 100755 --- a/scripts/docker/entrypoint_ci.sh +++ b/scripts/docker/entrypoint_ci.sh @@ -139,6 +139,9 @@ function environment_initialization() { # Added to have run-tests on path export PATH=${PATH}:${AIRFLOW_SOURCES} + # Directory where simple auth manager store generated passwords + export AIRFLOW_AUTH_MANAGER_CREDENTIAL_DIRECTORY="/files" + mkdir -pv "${AIRFLOW_HOME}/logs/" # Change the default worker_concurrency for tests diff --git a/tests/auth/managers/simple/test_simple_auth_manager.py b/tests/auth/managers/simple/test_simple_auth_manager.py index d4bd4e4fbfed2..434c0d60fcc76 100644 --- a/tests/auth/managers/simple/test_simple_auth_manager.py +++ b/tests/auth/managers/simple/test_simple_auth_manager.py @@ -50,7 +50,7 @@ class TestSimpleAuthManager: @pytest.mark.db_test def test_init_with_no_user(self, auth_manager_with_appbuilder): auth_manager_with_appbuilder.init() - with open(SimpleAuthManager.GENERATED_PASSWORDS_FILE) as file: + with open(auth_manager_with_appbuilder.get_generated_password_file()) as file: passwords_str = file.read().strip() user_passwords_from_file = json.loads(passwords_str) @@ -65,7 +65,7 @@ def test_init_with_users(self, auth_manager_with_appbuilder): } ] auth_manager_with_appbuilder.init() - with open(SimpleAuthManager.GENERATED_PASSWORDS_FILE) as file: + with open(auth_manager_with_appbuilder.get_generated_password_file()) as file: passwords_str = file.read().strip() user_passwords_from_file = json.loads(passwords_str) diff --git a/tests/auth/managers/simple/views/test_auth.py b/tests/auth/managers/simple/views/test_auth.py index a4e2a12fdcf1c..f61a9278f79a5 100644 --- 
a/tests/auth/managers/simple/views/test_auth.py +++ b/tests/auth/managers/simple/views/test_auth.py @@ -37,7 +37,7 @@ def simple_app(): ): "airflow.auth.managers.simple.simple_auth_manager.SimpleAuthManager", } ): - with open(SimpleAuthManager.GENERATED_PASSWORDS_FILE, "w") as file: + with open(SimpleAuthManager.get_generated_password_file(), "w") as file: user = {"test": "test"} file.write(json.dumps(user)) From 20f82901f4437b8a6ce2831e4b0f9d245056ce7d Mon Sep 17 00:00:00 2001 From: GPK Date: Sun, 13 Oct 2024 23:25:55 +0100 Subject: [PATCH 102/125] Disable flaky mssql based integration tests (#42811) * quarantined flaky mssql integration tests * disable mssql from ci integration checks --- dev/breeze/src/airflow_breeze/global_constants.py | 3 +++ dev/breeze/src/airflow_breeze/utils/selective_checks.py | 7 ++++++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py index 3879c121433f5..cd8a59aa05cef 100644 --- a/dev/breeze/src/airflow_breeze/global_constants.py +++ b/dev/breeze/src/airflow_breeze/global_constants.py @@ -68,6 +68,9 @@ "trino", "ydb", ] +DISABLE_TESTABLE_INTEGRATIONS_FROM_CI = [ + "mssql", +] OTHER_INTEGRATIONS = ["statsd", "otel", "openlineage"] ALLOWED_DEBIAN_VERSIONS = ["bookworm"] ALL_INTEGRATIONS = sorted( diff --git a/dev/breeze/src/airflow_breeze/utils/selective_checks.py b/dev/breeze/src/airflow_breeze/utils/selective_checks.py index 20bf514379f0b..cf4e124ff94e3 100644 --- a/dev/breeze/src/airflow_breeze/utils/selective_checks.py +++ b/dev/breeze/src/airflow_breeze/utils/selective_checks.py @@ -42,6 +42,7 @@ DEFAULT_MYSQL_VERSION, DEFAULT_POSTGRES_VERSION, DEFAULT_PYTHON_MAJOR_MINOR_VERSION, + DISABLE_TESTABLE_INTEGRATIONS_FROM_CI, HELM_VERSION, KIND_VERSION, RUNS_ON_PUBLIC_RUNNER, @@ -1318,7 +1319,11 @@ def excluded_providers_as_string(self) -> str: @cached_property def testable_integrations(self) -> list[str]: - return TESTABLE_INTEGRATIONS + return [ + integration + for integration in TESTABLE_INTEGRATIONS + if integration not in DISABLE_TESTABLE_INTEGRATIONS_FROM_CI + ] @cached_property def is_committer_build(self): From d9956cc239c2087edd143bcc8188878ab554fa30 Mon Sep 17 00:00:00 2001 From: Daniel Standish <15932138+dstandish@users.noreply.github.com> Date: Sun, 13 Oct 2024 18:15:11 -0700 Subject: [PATCH 103/125] Remove BackfillJobRunner class (#42943) --- airflow/cli/cli_config.py | 2 +- airflow/exceptions.py | 25 - airflow/jobs/backfill_job_runner.py | 1106 --------- airflow/jobs/job.py | 4 +- airflow/models/dag.py | 3 +- airflow/models/dagpickle.py | 2 +- airflow/models/dagrun.py | 2 +- airflow/models/taskinstance.py | 2 +- airflow/ti_deps/dependencies_deps.py | 8 - airflow/ti_deps/dependencies_states.py | 9 - .../listeners.rst | 2 +- docs/apache-airflow/howto/listener-plugin.rst | 4 +- tests/core/test_impersonation_tests.py | 24 +- tests/jobs/test_backfill_job.py | 2094 ----------------- tests/jobs/test_scheduler_job.py | 29 +- 15 files changed, 29 insertions(+), 3287 deletions(-) delete mode 100644 airflow/jobs/backfill_job_runner.py delete mode 100644 tests/jobs/test_backfill_job.py diff --git a/airflow/cli/cli_config.py b/airflow/cli/cli_config.py index e68a464f61bb5..8fe239c27ba40 100644 --- a/airflow/cli/cli_config.py +++ b/airflow/cli/cli_config.py @@ -924,7 +924,7 @@ def string_lower_type(val): # jobs check ARG_JOB_TYPE_FILTER = Arg( ("--job-type",), - choices=("BackfillJob", "LocalTaskJob", "SchedulerJob", "TriggererJob", "DagProcessorJob"), 
+ choices=("LocalTaskJob", "SchedulerJob", "TriggererJob", "DagProcessorJob"), action="store", help="The type of job(s) that will be checked.", ) diff --git a/airflow/exceptions.py b/airflow/exceptions.py index 55dd02fdae313..ccf62ca5e8178 100644 --- a/airflow/exceptions.py +++ b/airflow/exceptions.py @@ -327,31 +327,6 @@ class PoolNotFound(AirflowNotFoundException): """Raise when a Pool is not available in the system.""" -class NoAvailablePoolSlot(AirflowException): - """Raise when there is not enough slots in pool.""" - - -class DagConcurrencyLimitReached(AirflowException): - """Raise when DAG max_active_tasks limit is reached.""" - - -class TaskConcurrencyLimitReached(AirflowException): - """Raise when task max_active_tasks limit is reached.""" - - -class BackfillUnfinished(AirflowException): - """ - Raises when not all tasks succeed in backfill. - - :param message: The human-readable description of the exception - :param ti_status: The information about all task statuses - """ - - def __init__(self, message, ti_status): - super().__init__(message) - self.ti_status = ti_status - - class FileSyntaxError(NamedTuple): """Information about a single error in a file.""" diff --git a/airflow/jobs/backfill_job_runner.py b/airflow/jobs/backfill_job_runner.py deleted file mode 100644 index 19dda4d698221..0000000000000 --- a/airflow/jobs/backfill_job_runner.py +++ /dev/null @@ -1,1106 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-from __future__ import annotations - -import time -from typing import TYPE_CHECKING, Any, Iterable, Iterator, Mapping, Sequence - -import attr -import pendulum -from sqlalchemy import case, or_, select, tuple_, update -from sqlalchemy.exc import OperationalError -from sqlalchemy.orm.session import make_transient -from tabulate import tabulate - -from airflow import models -from airflow.exceptions import ( - AirflowException, - BackfillUnfinished, - DagConcurrencyLimitReached, - NoAvailablePoolSlot, - PoolNotFound, - TaskConcurrencyLimitReached, - UnknownExecutorException, -) -from airflow.executors.executor_loader import ExecutorLoader -from airflow.jobs.base_job_runner import BaseJobRunner -from airflow.jobs.job import Job, perform_heartbeat -from airflow.models import DAG, DagPickle -from airflow.models.dagrun import DagRun -from airflow.models.taskinstance import TaskInstance -from airflow.ti_deps.dep_context import DepContext -from airflow.ti_deps.dependencies_deps import BACKFILL_QUEUED_DEPS -from airflow.timetables.base import DagRunInfo -from airflow.utils import helpers, timezone -from airflow.utils.configuration import tmp_configuration_copy -from airflow.utils.log.logging_mixin import LoggingMixin -from airflow.utils.session import NEW_SESSION, provide_session -from airflow.utils.state import DagRunState, State, TaskInstanceState -from airflow.utils.types import DagRunTriggeredByType, DagRunType - -if TYPE_CHECKING: - import datetime - - from sqlalchemy.orm.session import Session - - from airflow.executors.base_executor import BaseExecutor - from airflow.models.abstractoperator import AbstractOperator - from airflow.models.taskinstance import TaskInstanceKey - - -class BackfillJobRunner(BaseJobRunner, LoggingMixin): - """ - A backfill job runner consists of a dag for a specific time range. - - It triggers a set of task instance runs, in the right order and lasts for - as long as it takes for the set of task instance to be completed. - """ - - job_type = "BackfillJob" - - STATES_COUNT_AS_RUNNING = (TaskInstanceState.RUNNING, TaskInstanceState.QUEUED) - - @attr.define - class _DagRunTaskStatus: - """ - Internal status of the backfill job. - - This class is intended to be instantiated only within a BackfillJobRunner - instance and will track the execution of tasks, e.g. running, skipped, - succeeded, failed, etc. Information about the dag runs related to the - backfill job are also being tracked in this structure, e.g. finished runs, etc. - Any other status related information related to the execution of dag runs / tasks - can be included in this structure since it makes it easier to pass it around. 
- - :param to_run: Tasks to run in the backfill - :param running: Maps running task instance key to task instance object - :param skipped: Tasks that have been skipped - :param succeeded: Tasks that have succeeded so far - :param failed: Tasks that have failed - :param not_ready: Tasks not ready for execution - :param deadlocked: Deadlocked tasks - :param active_runs: Active dag runs at a certain point in time - :param executed_dag_run_dates: Datetime objects for the executed dag runs - :param finished_runs: Number of finished runs so far - :param total_runs: Number of total dag runs able to run - """ - - to_run: dict[TaskInstanceKey, TaskInstance] = attr.ib(factory=dict) - running: dict[TaskInstanceKey, TaskInstance] = attr.ib(factory=dict) - skipped: set[TaskInstanceKey] = attr.ib(factory=set) - succeeded: set[TaskInstanceKey] = attr.ib(factory=set) - failed: set[TaskInstanceKey] = attr.ib(factory=set) - not_ready: set[TaskInstanceKey] = attr.ib(factory=set) - deadlocked: set[TaskInstance] = attr.ib(factory=set) - active_runs: set[DagRun] = attr.ib(factory=set) - executed_dag_run_dates: set[pendulum.DateTime] = attr.ib(factory=set) - finished_runs: int = 0 - total_runs: int = 0 - - def __init__( - self, - job: Job, - dag: DAG, - start_date=None, - end_date=None, - mark_success=False, - donot_pickle=False, - ignore_first_depends_on_past=False, - ignore_task_deps=False, - pool=None, - delay_on_limit_secs=1.0, - verbose=False, - conf=None, - rerun_failed_tasks=False, - run_backwards=False, - run_at_least_once=False, - continue_on_failures=False, - disable_retry=False, - ) -> None: - """ - Create a BackfillJobRunner. - - :param dag: DAG object. - :param start_date: start date for the backfill date range. - :param end_date: end date for the backfill date range. - :param mark_success: flag whether to mark the task auto success. - :param donot_pickle: whether pickle - :param ignore_first_depends_on_past: whether to ignore depend on past - :param ignore_task_deps: whether to ignore the task dependency - :param pool: pool to backfill - :param delay_on_limit_secs: - :param verbose: - :param conf: a dictionary which user could pass k-v pairs for backfill - :param rerun_failed_tasks: flag to whether to - auto rerun the failed task in backfill - :param run_backwards: Whether to process the dates from most to least recent - :param run_at_least_once: If true, always run the DAG at least once even - if no logical run exists within the time range. - :param args: - :param kwargs: - """ - super().__init__(job) - self.dag = dag - self.dag_id = dag.dag_id - self.bf_start_date = start_date - self.bf_end_date = end_date - self.mark_success = mark_success - self.donot_pickle = donot_pickle - self.ignore_first_depends_on_past = ignore_first_depends_on_past - self.ignore_task_deps = ignore_task_deps - self.pool = pool - self.delay_on_limit_secs = delay_on_limit_secs - self.verbose = verbose - self.conf = conf - self.rerun_failed_tasks = rerun_failed_tasks - self.run_backwards = run_backwards - self.run_at_least_once = run_at_least_once - self.continue_on_failures = continue_on_failures - self.disable_retry = disable_retry - - def _update_counters(self, ti_status: _DagRunTaskStatus, session: Session) -> None: - """ - Update the counters per state of the tasks that were running. - - Can re-add to tasks to run when required. 
- - :param ti_status: the internal status of the backfill job tasks - """ - tis_to_be_scheduled = [] - refreshed_tis = [] - TI = TaskInstance - - ti_primary_key_to_ti_key = {ti_key.primary: ti_key for ti_key in ti_status.running.keys()} - - filter_for_tis = TI.filter_for_tis(list(ti_status.running.values())) - if filter_for_tis is not None: - refreshed_tis = session.scalars(select(TI).where(filter_for_tis)).all() - - for ti in refreshed_tis: - # Use primary key to match in memory information - ti_key = ti_primary_key_to_ti_key[ti.key.primary] - if ti.state == TaskInstanceState.SUCCESS: - ti_status.succeeded.add(ti_key) - self.log.debug("Task instance %s succeeded. Don't rerun.", ti) - ti_status.running.pop(ti_key) - continue - if ti.state == TaskInstanceState.SKIPPED: - ti_status.skipped.add(ti_key) - self.log.debug("Task instance %s skipped. Don't rerun.", ti) - ti_status.running.pop(ti_key) - continue - if ti.state == TaskInstanceState.FAILED: - self.log.error("Task instance %s failed", ti) - ti_status.failed.add(ti_key) - ti_status.running.pop(ti_key) - continue - # special case: if the task needs to run again put it back - if ti.state == TaskInstanceState.UP_FOR_RETRY: - self.log.warning("Task instance %s is up for retry", ti) - ti_status.running.pop(ti_key) - ti_status.to_run[ti.key] = ti - # special case: if the task needs to be rescheduled put it back - elif ti.state == TaskInstanceState.UP_FOR_RESCHEDULE: - self.log.warning("Task instance %s is up for reschedule", ti) - ti_status.running.pop(ti_key) - ti_status.to_run[ti.key] = ti - # special case: The state of the task can be set to NONE by the task itself - # when it reaches concurrency limits. It could also happen when the state - # is changed externally, e.g. by clearing tasks from the ui. We need to cover - # for that as otherwise those tasks would fall outside the scope of - # the backfill suddenly. - elif ti.state is None: - self.log.warning( - "FIXME: task instance %s state was set to none externally or " - "reaching concurrency limits. Re-adding task to queue.", - ti, - ) - tis_to_be_scheduled.append(ti) - ti_status.running.pop(ti_key) - ti_status.to_run[ti.key] = ti - # special case: Deferrable task can go from DEFERRED to SCHEDULED; - # when that happens, we need to put it back as in UP_FOR_RESCHEDULE - elif ti.state == TaskInstanceState.SCHEDULED: - self.log.debug("Task instance %s is resumed from deferred state", ti) - ti_status.running.pop(ti_key) - ti_status.to_run[ti.key] = ti - - # Batch schedule of task instances - if tis_to_be_scheduled: - filter_for_tis = TI.filter_for_tis(tis_to_be_scheduled) - session.execute( - update(TI) - .where(filter_for_tis) - .values( - state=TaskInstanceState.SCHEDULED, - try_number=case( - ( - or_(TI.state.is_(None), TI.state != TaskInstanceState.UP_FOR_RESCHEDULE), - TI.try_number + 1, - ), - else_=TI.try_number, - ), - ) - .execution_options(synchronize_session=False) - ) - session.flush() - - def _manage_executor_state( - self, - running: Mapping[TaskInstanceKey, TaskInstance], - executor: BaseExecutor, - session: Session, - ) -> Iterator[tuple[AbstractOperator, str, Sequence[TaskInstance], int]]: - """ - Compare task instances' states with that of the executor. - - Expands downstream mapped tasks when necessary. 
- - :param running: dict of key, task to verify - :return: An iterable of expanded TaskInstance per MappedTask - """ - # list of tuples (dag_id, task_id, execution_date, map_index) of running tasks in executor - buffered_events = list(executor.get_event_buffer().items()) - running_tis_ids = [ - (key.dag_id, key.task_id, key.run_id, key.map_index) - for key, _ in buffered_events - if key in running - ] - # list of TaskInstance of running tasks in executor (refreshed from db in batch) - refreshed_running_tis = session.scalars( - select(TaskInstance).where( - tuple_( - TaskInstance.dag_id, - TaskInstance.task_id, - TaskInstance.run_id, - TaskInstance.map_index, - ).in_(running_tis_ids) - ) - ).all() - # dict of refreshed TaskInstance by key to easily find them - running_dict = {(ti.dag_id, ti.task_id, ti.run_id, ti.map_index): ti for ti in refreshed_running_tis} - need_refresh = False - - for key, value in buffered_events: - state, info = value - ti_key = (key.dag_id, key.task_id, key.run_id, key.map_index) - if ti_key not in running_dict: - self.log.warning("%s state %s not in running=%s", key, state, running.values()) - continue - - ti = running_dict[ti_key] - if need_refresh: - ti.refresh_from_db(session=session) - - self.log.debug("Executor state: %s task %s", state, ti) - - if ( - state in (TaskInstanceState.FAILED, TaskInstanceState.SUCCESS) - and ti.state in self.STATES_COUNT_AS_RUNNING - ): - msg = ( - f"The executor reported that the task instance {ti} finished with state {state}, " - f"but the task instance's state attribute is {ti.state}. " - "Learn more: https://airflow.apache.org/docs/apache-airflow/stable/troubleshooting.html#task-state-changed-externally" - ) - if info is not None: - msg += f" Extra info: {info}" - self.log.error(msg) - ti.handle_failure(error=msg) - continue - - def _iter_task_needing_expansion() -> Iterator[AbstractOperator]: - from airflow.models.mappedoperator import AbstractOperator - - for node in self.dag.get_task(ti.task_id).iter_mapped_dependants(): - if isinstance(node, AbstractOperator): - yield node - else: # A (mapped) task group. All its children need expansion. - yield from node.iter_tasks() - - if ti.state not in self.STATES_COUNT_AS_RUNNING: - # Don't use ti.task; if this task is mapped, that attribute - # would hold the unmapped task. We need to original task here. - for node in _iter_task_needing_expansion(): - new_tis, num_mapped_tis = node.expand_mapped_task(ti.run_id, session=session) - yield node, ti.run_id, new_tis, num_mapped_tis - - @provide_session - def _get_dag_run( - self, - dagrun_info: DagRunInfo, - dag: DAG, - session: Session = NEW_SESSION, - ) -> DagRun | None: - """ - Return an existing dag run for the given run date or create one. - - If the max_active_runs limit is reached, this function will return None. 
- - :param dagrun_info: Schedule information for the dag run - :param dag: DAG - :param session: the database session object - :return: a DagRun in state RUNNING or None - """ - run_date = dagrun_info.logical_date - - respect_dag_max_active_limit = bool(dag.timetable.can_be_scheduled) - - current_active_dag_count = dag.get_num_active_runs(external_trigger=False) - - # check if we are scheduling on top of an already existing DAG run - # we could find a "scheduled" run instead of a "backfill" - runs = DagRun.find(dag_id=dag.dag_id, execution_date=run_date, session=session) - run: DagRun | None - if runs: - run = runs[0] - if run.state == DagRunState.RUNNING: - respect_dag_max_active_limit = False - # Fixes --conf overwrite for backfills with already existing DagRuns - run.conf = self.conf or {} - # start_date is cleared for existing DagRuns - run.start_date = timezone.utcnow() - else: - run = None - - # enforce max_active_runs limit for dag, special cases already - # handled by respect_dag_max_active_limit - if respect_dag_max_active_limit and current_active_dag_count >= dag.max_active_runs: - return None - - run = run or dag.create_dagrun( - execution_date=run_date, - data_interval=dagrun_info.data_interval, - start_date=timezone.utcnow(), - state=DagRunState.RUNNING, - external_trigger=False, - session=session, - conf=self.conf, - run_type=DagRunType.BACKFILL_JOB, - creating_job_id=self.job.id, - triggered_by=DagRunTriggeredByType.TIMETABLE, - ) - - # set required transient field - run.dag = dag - - # explicitly mark as backfill and running - run.state = DagRunState.RUNNING - run.run_type = DagRunType.BACKFILL_JOB - run.verify_integrity(session=session) - - run.notify_dagrun_state_changed(msg="started") - return run - - @provide_session - def _task_instances_for_dag_run( - self, - dag: DAG, - dag_run: DagRun, - session: Session = NEW_SESSION, - ) -> dict[TaskInstanceKey, TaskInstance]: - """ - Return a map of task instance keys to task instance objects for the given dag run. - - :param dag_run: the dag run to get the tasks from - :param session: the database session object - """ - tasks_to_run = {} - - if dag_run is None: - return tasks_to_run - - # check if we have orphaned tasks - self.reset_state_for_orphaned_tasks(filter_by_dag_run=dag_run, session=session) - - # for some reason if we don't refresh the reference to run is lost - dag_run.refresh_from_db(session=session) - make_transient(dag_run) - - dag_run.dag = dag - info = dag_run.task_instance_scheduling_decisions(session=session) - schedulable_tis = info.schedulable_tis - try: - for ti in dag_run.get_task_instances(session=session): - if ti in schedulable_tis: - if ti.state != TaskInstanceState.UP_FOR_RESCHEDULE: - ti.try_number += 1 - ti.set_state(TaskInstanceState.SCHEDULED) - if ti.state != TaskInstanceState.REMOVED: - tasks_to_run[ti.key] = ti - session.commit() - except Exception: - session.rollback() - raise - return tasks_to_run - - def _log_progress(self, ti_status: _DagRunTaskStatus) -> None: - self.log.info( - "[backfill progress] | finished run %s of %s | tasks waiting: %s | succeeded: %s | " - "running: %s | failed: %s | skipped: %s | deadlocked: %s | not ready: %s", - ti_status.finished_runs, - ti_status.total_runs, - len(ti_status.to_run), - len(ti_status.succeeded), - len(ti_status.running), - len(ti_status.failed), - len(ti_status.skipped), - len(ti_status.deadlocked), - len(ti_status.not_ready), - ) - - self.log.debug("Finished dag run loop iteration. 
Remaining tasks %s", ti_status.to_run.values()) - - def _process_backfill_task_instances( - self, - ti_status: _DagRunTaskStatus, - pickle_id: int | None, - start_date: datetime.datetime | None = None, - *, - session: Session, - ) -> list: - """ - Process a set of task instances from a set of DAG runs. - - Special handling is done to account for different task instance states - that could be present when running them in a backfill process. - - :param ti_status: the internal status of the job - :param executor: the executor to run the task instances - :param pickle_id: the pickle_id if dag is pickled, None otherwise - :param start_date: the start date of the backfill job - :param session: the current session object - :return: the list of execution_dates for the finished dag runs - """ - executed_run_dates = [] - - while (ti_status.to_run or ti_status.running) and not ti_status.deadlocked: - self.log.debug("Clearing out not_ready list") - ti_status.not_ready.clear() - - # we need to execute the tasks bottom to top - # or leaf to root, as otherwise tasks might be - # determined deadlocked while they are actually - # waiting for their upstream to finish - def _per_task_process(key, ti: TaskInstance, session): - ti.refresh_from_db(lock_for_update=True, session=session) - - task = self.dag.get_task(ti.task_id) - ti.task = task - - self.log.debug("Task instance to run %s state %s", ti, ti.state) - - # The task was already marked successful or skipped by a - # different Job. Don't rerun it. - if ti.state == TaskInstanceState.SUCCESS: - ti_status.succeeded.add(key) - self.log.debug("Task instance %s succeeded. Don't rerun.", ti) - ti_status.to_run.pop(key) - if key in ti_status.running: - ti_status.running.pop(key) - return - elif ti.state == TaskInstanceState.SKIPPED: - ti_status.skipped.add(key) - self.log.debug("Task instance %s skipped. Don't rerun.", ti) - ti_status.to_run.pop(key) - if key in ti_status.running: - ti_status.running.pop(key) - return - - if self.rerun_failed_tasks: - # Rerun failed tasks or upstreamed failed tasks - if ti.state in (TaskInstanceState.FAILED, TaskInstanceState.UPSTREAM_FAILED): - self.log.error("Task instance %s with state %s", ti, ti.state) - if key in ti_status.running: - ti_status.running.pop(key) - # Reset the failed task in backfill to scheduled state - ti.try_number += 1 - ti.set_state(TaskInstanceState.SCHEDULED, session=session) - if ti.dag_run not in ti_status.active_runs: - ti_status.active_runs.add(ti.dag_run) - else: - # Default behaviour which works for subdag. - if ti.state in (TaskInstanceState.FAILED, TaskInstanceState.UPSTREAM_FAILED): - self.log.error("Task instance %s with state %s", ti, ti.state) - ti_status.failed.add(key) - ti_status.to_run.pop(key) - if key in ti_status.running: - ti_status.running.pop(key) - return - - if self.ignore_first_depends_on_past: - dagrun = ti.get_dagrun(session=session) - ignore_depends_on_past = dagrun.execution_date == (start_date or ti.start_date) - else: - ignore_depends_on_past = False - - backfill_context = DepContext( - deps=BACKFILL_QUEUED_DEPS, - ignore_depends_on_past=ignore_depends_on_past, - ignore_task_deps=self.ignore_task_deps, - wait_for_past_depends_before_skipping=False, - flag_upstream_failed=True, - ) - - executor = ExecutorLoader.load_executor(str(ti.executor) if ti.executor else None) - # Is the task runnable? 
-- then run it - # the dependency checker can change states of tis - if ti.are_dependencies_met( - dep_context=backfill_context, session=session, verbose=self.verbose - ): - if executor.has_task(ti): - self.log.debug("Task Instance %s already in executor waiting for queue to clear", ti) - else: - self.log.debug("Sending %s to executor", ti) - # Skip scheduled state, we are executing immediately - if ti.state in (TaskInstanceState.UP_FOR_RETRY, None): - # i am not sure why this is necessary. - # seemingly a quirk of backfill runner. - # it should be handled elsewhere i think. - # seems the leaf tasks are set SCHEDULED but others not. - # but i am not going to look too closely since we need - # to nuke the current backfill approach anyway. - ti.try_number += 1 - ti.state = TaskInstanceState.QUEUED - ti.queued_by_job_id = self.job.id - ti.queued_dttm = timezone.utcnow() - session.merge(ti) - try: - session.commit() - except OperationalError: - self.log.exception("Failed to commit task state change due to operational error") - session.rollback() - # early exit so the outer loop can retry - return - - cfg_path = None - - if executor.is_local: - cfg_path = tmp_configuration_copy() - - executor.queue_task_instance( - ti, - mark_success=self.mark_success, - pickle_id=pickle_id, - ignore_task_deps=self.ignore_task_deps, - ignore_depends_on_past=ignore_depends_on_past, - wait_for_past_depends_before_skipping=False, - pool=self.pool, - cfg_path=cfg_path, - ) - ti_status.running[key] = ti - ti_status.to_run.pop(key) - return - - if ti.state == TaskInstanceState.UPSTREAM_FAILED: - self.log.error("Task instance %s upstream failed", ti) - ti_status.failed.add(key) - ti_status.to_run.pop(key) - if key in ti_status.running: - ti_status.running.pop(key) - return - - # special case - if ti.state == TaskInstanceState.UP_FOR_RETRY: - self.log.debug("Task instance %s retry period not expired yet", ti) - if key in ti_status.running: - ti_status.running.pop(key) - ti_status.to_run[key] = ti - return - - # special case - if ti.state == TaskInstanceState.UP_FOR_RESCHEDULE: - self.log.debug("Task instance %s reschedule period not expired yet", ti) - if key in ti_status.running: - ti_status.running.pop(key) - ti_status.to_run[key] = ti - return - - # all remaining tasks - self.log.debug("Adding %s to not_ready", ti) - ti_status.not_ready.add(key) - - try: - for task in self.dag.topological_sort(): - for key, ti in list(ti_status.to_run.items()): - # Attempt to workaround deadlock on backfill by attempting to commit the transaction - # state update few times before giving up - max_attempts = 5 - for i in range(max_attempts): - if task.task_id != ti.task_id: - continue - - pool = session.scalar( - select(models.Pool).where(models.Pool.pool == task.pool).limit(1) - ) - if not pool: - raise PoolNotFound(f"Unknown pool: {task.pool}") - - open_slots = pool.open_slots(session=session) - if open_slots <= 0: - raise NoAvailablePoolSlot( - f"Not scheduling since there are {open_slots} " - f"open slots in pool {task.pool}" - ) - - num_running_task_instances_in_dag = DAG.get_num_task_instances( - self.dag_id, - states=self.STATES_COUNT_AS_RUNNING, - session=session, - ) - - if num_running_task_instances_in_dag >= self.dag.max_active_tasks: - raise DagConcurrencyLimitReached( - "Not scheduling since DAG max_active_tasks limit is reached." 
- ) - - if task.max_active_tis_per_dag is not None: - num_running_task_instances_in_task = DAG.get_num_task_instances( - dag_id=self.dag_id, - task_ids=[task.task_id], - states=self.STATES_COUNT_AS_RUNNING, - session=session, - ) - - if num_running_task_instances_in_task >= task.max_active_tis_per_dag: - raise TaskConcurrencyLimitReached( - "Not scheduling since Task concurrency limit is reached." - ) - - if task.max_active_tis_per_dagrun is not None: - num_running_task_instances_in_task_dagrun = DAG.get_num_task_instances( - dag_id=self.dag_id, - run_id=ti.run_id, - task_ids=[task.task_id], - states=self.STATES_COUNT_AS_RUNNING, - session=session, - ) - - if ( - num_running_task_instances_in_task_dagrun - >= task.max_active_tis_per_dagrun - ): - raise TaskConcurrencyLimitReached( - "Not scheduling since Task concurrency per DAG run limit is reached." - ) - - _per_task_process(key, ti, session) - try: - session.commit() - except OperationalError: - self.log.exception( - "Failed to commit task state due to operational error. " - "The job will retry this operation so if your backfill succeeds, " - "you can safely ignore this message.", - ) - session.rollback() - if i == max_attempts - 1: - raise - # retry the loop - else: - # break the retry loop - break - except (NoAvailablePoolSlot, DagConcurrencyLimitReached, TaskConcurrencyLimitReached) as e: - self.log.debug(e) - - perform_heartbeat( - job=self.job, - heartbeat_callback=self.heartbeat_callback, - only_if_necessary=True, - ) - # execute the tasks in the queue - for executor in self.job.executors: - executor.heartbeat() - - # If the set of tasks that aren't ready ever equals the set of - # tasks to run and there are no running tasks then the backfill - # is deadlocked - if ti_status.not_ready and ti_status.not_ready == set(ti_status.to_run) and not ti_status.running: - self.log.warning("Deadlock discovered for ti_status.to_run=%s", ti_status.to_run.values()) - ti_status.deadlocked.update(ti_status.to_run.values()) - ti_status.to_run.clear() - - for executor in self.job.executors: - # check executor state -- and expand any mapped TIs - for node, run_id, new_mapped_tis, max_map_index in self._manage_executor_state( - ti_status.running, executor, session - ): - - def to_keep(key: TaskInstanceKey) -> bool: - if key.dag_id != node.dag_id or key.task_id != node.task_id or key.run_id != run_id: - # For another Dag/Task/Run -- don't remove - return True - return 0 <= key.map_index <= max_map_index - - # remove the old unmapped TIs for node -- they have been replaced with the mapped TIs - ti_status.to_run = {key: ti for (key, ti) in ti_status.to_run.items() if to_keep(key)} - - ti_status.to_run.update({ti.key: ti for ti in new_mapped_tis}) - - for new_ti in new_mapped_tis: - new_ti.try_number += 1 - new_ti.set_state(TaskInstanceState.SCHEDULED, session=session) - - # Set state to failed for running TIs that are set up for retry if disable-retry flag is set - for ti in ti_status.running.values(): - if self.disable_retry and ti.state == TaskInstanceState.UP_FOR_RETRY: - ti.set_state(TaskInstanceState.FAILED, session=session) - - # update the task counters - self._update_counters(ti_status=ti_status, session=session) - session.commit() - - # update dag run state - _dag_runs = ti_status.active_runs.copy() - for run in _dag_runs: - run.update_state(session=session) - if run.state in State.finished_dr_states: - ti_status.finished_runs += 1 - ti_status.active_runs.remove(run) - executed_run_dates.append(run.execution_date) - - 
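# Hedged aside: the deadlock check earlier in this loop is a plain set
# comparison -- the backfill is declared stuck only when every task still to
# run is "not ready" and nothing is in flight, roughly:
#
#     deadlocked = bool(not_ready) and not_ready == set(to_run) and not running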
self._log_progress(ti_status) - session.commit() - time.sleep(1) - - # return updated status - return executed_run_dates - - @provide_session - def _collect_errors(self, ti_status: _DagRunTaskStatus, session: Session = NEW_SESSION) -> Iterator[str]: - def tabulate_ti_keys_set(ti_keys: Iterable[TaskInstanceKey]) -> str: - # Sorting by execution date first - sorted_ti_keys: Any = sorted( - ti_keys, - key=lambda ti_key: ( - ti_key.run_id, - ti_key.dag_id, - ti_key.task_id, - ti_key.map_index, - ti_key.try_number, - ), - ) - - if all(key.map_index == -1 for key in ti_keys): - headers = ["DAG ID", "Task ID", "Run ID", "Try number"] - sorted_ti_keys = (k[0:4] for k in sorted_ti_keys) - else: - headers = ["DAG ID", "Task ID", "Run ID", "Map Index", "Try number"] - - return tabulate(sorted_ti_keys, headers=headers) - - if ti_status.failed: - yield "Some task instances failed:\n" - yield tabulate_ti_keys_set(ti_status.failed) - if ti_status.deadlocked: - yield "BackfillJob is deadlocked." - deadlocked_depends_on_past = any( - t.are_dependencies_met( - dep_context=DepContext(ignore_depends_on_past=False), - session=session, - verbose=self.verbose, - ) - != t.are_dependencies_met( - dep_context=DepContext(ignore_depends_on_past=True), session=session, verbose=self.verbose - ) - for t in ti_status.deadlocked - ) - if deadlocked_depends_on_past: - yield ( - "Some of the deadlocked tasks were unable to run because " - 'of "depends_on_past" relationships. Try running the ' - "backfill with the option " - '"ignore_first_depends_on_past=True" or passing "-I" at ' - "the command line." - ) - yield "\nThese tasks have succeeded:\n" - yield tabulate_ti_keys_set(ti_status.succeeded) - yield "\n\nThese tasks are running:\n" - yield tabulate_ti_keys_set(ti_status.running) - yield "\n\nThese tasks have failed:\n" - yield tabulate_ti_keys_set(ti_status.failed) - yield "\n\nThese tasks are skipped:\n" - yield tabulate_ti_keys_set(ti_status.skipped) - yield "\n\nThese tasks are deadlocked:\n" - yield tabulate_ti_keys_set([ti.key for ti in ti_status.deadlocked]) - - @provide_session - def _execute_dagruns( - self, - dagrun_infos: Iterable[DagRunInfo], - ti_status: _DagRunTaskStatus, - pickle_id: int | None, - start_date: datetime.datetime | None, - session: Session = NEW_SESSION, - ) -> None: - """ - Compute and execute dag runs and their respective task instances for the given dates. - - Updates ``ti_status.executed_dag_run_dates`` with the execution dates of the dag runs that were executed. - - :param dagrun_infos: Schedule information for dag runs - :param ti_status: internal BackfillJobRunner status structure to track TIs' progress - :param pickle_id: numeric id of the pickled dag, None if not pickled - :param start_date: backfill start date - :param session: the current session object - """ - for dagrun_info in dagrun_infos: - dag_run = self._get_dag_run(dagrun_info, self.dag, session=session) - if dag_run is not None: - tis_map = self._task_instances_for_dag_run(self.dag, dag_run, session=session) - ti_status.active_runs.add(dag_run) - ti_status.to_run.update(tis_map or {}) - - tis_missing_executor = [] - for ti in ti_status.to_run.values(): - if ti.executor: - try: - ExecutorLoader.lookup_executor_name_by_str(ti.executor) - except UnknownExecutorException: - tis_missing_executor.append(ti) - - if tis_missing_executor: - raise UnknownExecutorException( - "The following task instances are configured to use an executor that is not present.
" - "Review the core.executors Airflow configuration to add it or clear the task instance to " - "clear the executor configuration for this task.\n" - + "\n".join( - [f" {ti.task_id}: {ti.run_id} (executor: {ti.executor})" for ti in tis_missing_executor] - ) - ) - processed_dag_run_dates = self._process_backfill_task_instances( - ti_status=ti_status, - pickle_id=pickle_id, - start_date=start_date, - session=session, - ) - - ti_status.executed_dag_run_dates.update(processed_dag_run_dates) - - @provide_session - def _set_unfinished_dag_runs_to_failed( - self, - dag_runs: Iterable[DagRun], - session: Session = NEW_SESSION, - ) -> None: - """ - Update the state of each dagrun based on the task_instance state and set unfinished runs to failed. - - :param dag_runs: DAG runs - :param session: session - :return: None - """ - for dag_run in dag_runs: - dag_run.update_state() - if dag_run.state not in State.finished_dr_states: - dag_run.set_state(DagRunState.FAILED) - session.merge(dag_run) - - @provide_session - def _execute(self, session: Session = NEW_SESSION) -> None: - """ - Initialize all required components of a dag for a specified date range and execute the tasks. - - :meta private: - """ - ti_status = BackfillJobRunner._DagRunTaskStatus() - - start_date = self.bf_start_date - - # Get DagRun schedule between the start/end dates, which will turn into dag runs. - dagrun_start_date = timezone.coerce_datetime(start_date) - if self.bf_end_date is None: - dagrun_end_date = pendulum.now(timezone.utc) - else: - dagrun_end_date = pendulum.instance(self.bf_end_date) - dagrun_infos = list(self.dag.iter_dagrun_infos_between(dagrun_start_date, dagrun_end_date)) - if self.run_backwards: - tasks_that_depend_on_past = [t.task_id for t in self.dag.task_dict.values() if t.depends_on_past] - if tasks_that_depend_on_past: - raise AirflowException( - f"You cannot backfill backwards because one or more " - f'tasks depend_on_past: {",".join(tasks_that_depend_on_past)}' - ) - dagrun_infos = dagrun_infos[::-1] - - if not dagrun_infos: - if not self.run_at_least_once: - self.log.info("No run dates were found for the given dates and dag interval.") - return - dagrun_infos = [DagRunInfo.interval(dagrun_start_date, dagrun_end_date)] - - running_dagruns = DagRun.find( - dag_id=self.dag.dag_id, - execution_start_date=self.bf_start_date, - execution_end_date=self.bf_end_date, - no_backfills=True, - state=DagRunState.RUNNING, - ) - - if running_dagruns: - for run in running_dagruns: - self.log.error( - "Backfill cannot be created for DagRun %s in %s, as there's already %s in a RUNNING " - "state.", - run.run_id, - run.execution_date.strftime("%Y-%m-%dT%H:%M:%S"), - run.run_type, - ) - self.log.error( - "Changing DagRun into BACKFILL would cause scheduler to lose track of executing " - "tasks. Not changing DagRun type into BACKFILL, and trying insert another DagRun into " - "database would cause database constraint violation for dag_id + execution_date " - "combination. 
Please adjust backfill dates or wait for this DagRun to finish.", - ) - return - pickle_id = None - - _support_pickling = [] - - for executor in self.job.executors: - _support_pickling.append(executor.supports_pickling) - - executor.job_id = self.job.id - executor.start() - - if not self.donot_pickle and all(_support_pickling): - pickle = DagPickle(self.dag) - session.add(pickle) - session.commit() - pickle_id = pickle.id - - ti_status.total_runs = len(dagrun_infos) # total dag runs in backfill - - try: - remaining_dates = ti_status.total_runs - while remaining_dates > 0: - dagrun_infos_to_process = [ - dagrun_info - for dagrun_info in dagrun_infos - if dagrun_info.logical_date not in ti_status.executed_dag_run_dates - ] - self._execute_dagruns( - dagrun_infos=dagrun_infos_to_process, - ti_status=ti_status, - pickle_id=pickle_id, - start_date=start_date, - session=session, - ) - - remaining_dates = ti_status.total_runs - len(ti_status.executed_dag_run_dates) - err = "".join(self._collect_errors(ti_status=ti_status, session=session)) - if err: - if not self.continue_on_failures or ti_status.deadlocked: - raise BackfillUnfinished(err, ti_status) - - if remaining_dates > 0: - self.log.info( - "max_active_runs limit for dag %s has been reached " - " - waiting for other dag runs to finish", - self.dag_id, - ) - time.sleep(self.delay_on_limit_secs) - except (KeyboardInterrupt, SystemExit): - self.log.warning("Backfill terminated by user.") - - # TODO: we will need to terminate running task instances and set the - # state to failed. - self._set_unfinished_dag_runs_to_failed(ti_status.active_runs) - except OperationalError: - self.log.exception( - "Backfill job dead-locked. The job will retry this operation, so it is likely " - "to heal itself. If your backfill succeeds you can ignore this exception.", - ) - raise - finally: - session.commit() - for executor in self.job.executors: - executor.end() - - self.log.info("Backfill done for DAG %s. Exiting.", self.dag) - - @provide_session - def reset_state_for_orphaned_tasks( - self, - filter_by_dag_run: DagRun | None = None, - session: Session = NEW_SESSION, - ) -> int | None: - """ - Reset state of orphaned tasks. - - This function checks if there are any tasks in the dagrun (or all) that - are in a scheduled or queued state but are not known by the executor. If - it finds those it will reset the state to None so they will get picked - up again. The batch option is for performance reasons as the queries - are made in sequence. - - :param filter_by_dag_run: the dag_run we want to process, None if all - :return: the number of TIs reset - """ - queued_tis = [] - running_tis = [] - for executor in self.job.executors: - queued_tis.append(executor.queued_tasks) - # also consider running as the state might not have changed in the db yet - running_tis.append(executor.running) - - # Can't use an update here since it doesn't support joins.
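# Hedged sketch (an approximation of ``airflow.utils.helpers.reduce_in_chunks``,
# which the code below relies on): fold a query function over fixed-size slices
# of the candidate list so each database round-trip stays under
# ``max_tis_per_query``.
#
#     def reduce_in_chunks(fn, iterable, initializer, chunk_size):
#         acc = initializer
#         for i in range(0, len(iterable), chunk_size):
#             acc = fn(acc, iterable[i : i + chunk_size])
#         return acc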
- resettable_states = [TaskInstanceState.SCHEDULED, TaskInstanceState.QUEUED] - if filter_by_dag_run is None: - resettable_tis = ( - session.scalars( - select(TaskInstance) - .join(TaskInstance.dag_run) - .where( - DagRun.state == DagRunState.RUNNING, - DagRun.run_type != DagRunType.BACKFILL_JOB, - TaskInstance.state.in_(resettable_states), - ) - ) - ).all() - else: - resettable_tis = filter_by_dag_run.get_task_instances(state=resettable_states, session=session) - - tis_to_reset = [ti for ti in resettable_tis if ti.key not in queued_tis and ti.key not in running_tis] - if not tis_to_reset: - return 0 - - def query(result, items): - if not items: - return result - - filter_for_tis = TaskInstance.filter_for_tis(items) - reset_tis = session.scalars( - select(TaskInstance) - .where(filter_for_tis, TaskInstance.state.in_(resettable_states)) - .with_for_update() - ).all() - - for ti in reset_tis: - ti.state = None - session.merge(ti) - - return result + reset_tis - - reset_tis = helpers.reduce_in_chunks(query, tis_to_reset, [], self.job.max_tis_per_query) - - task_instance_str = "\n".join(f"\t{x!r}" for x in reset_tis) - session.flush() - - self.log.info("Reset the following %s TaskInstances:\n%s", len(reset_tis), task_instance_str) - return len(reset_tis) diff --git a/airflow/jobs/job.py b/airflow/jobs/job.py index 03bf92d4e3d53..0c2db219ef957 100644 --- a/airflow/jobs/job.py +++ b/airflow/jobs/job.py @@ -77,8 +77,6 @@ class Job(Base, LoggingMixin): The ORM class representing Job stored in the database. Jobs are processing items with state and duration that aren't task instances. - For instance a BackfillJob is a collection of task instance runs, - but should have its own state, start and end time. """ __tablename__ = "job" @@ -117,7 +115,7 @@ class Job(Base, LoggingMixin): """ TaskInstances which have been enqueued by this Job. - Only makes sense for SchedulerJob and BackfillJob instances. + Only makes sense for SchedulerJob. """ def __init__(self, executor: BaseExecutor | None = None, heartrate=None, **kwargs): diff --git a/airflow/models/dag.py b/airflow/models/dag.py index f5def92ea92a7..1dfb3b2e9114d 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -2381,8 +2381,7 @@ def add_logger_if_needed(ti: TaskInstance): tasks = self.task_dict self.log.debug("starting dagrun") # Instead of starting a scheduler, we run the minimal loop possible to check - # for task readiness and dependency management. This is notably faster - # than creating a BackfillJob and allows us to surface logs to the user + # for task readiness and dependency management. # ``Dag.test()`` works in two different modes depending on ``use_executor``: # - if ``use_executor`` is False, runs the task locally with no executor using ``_run_task`` diff --git a/airflow/models/dagpickle.py b/airflow/models/dagpickle.py index e6f4561d8e1bf..c06ef09709f1c 100644 --- a/airflow/models/dagpickle.py +++ b/airflow/models/dagpickle.py @@ -32,7 +32,7 @@ class DagPickle(Base): """ - Represents a version of a DAG and becomes a source of truth for a BackfillJob execution. + Represents a version of a DAG and becomes a source of truth for an execution. Dags can originate from different places (user repos, main repo, ...) and also get executed in different places (different executors). 
A pickle is a native python serialized object, diff --git a/airflow/models/dagrun.py b/airflow/models/dagrun.py index 0373bc667bfc6..cad82e72b8b2d 100644 --- a/airflow/models/dagrun.py +++ b/airflow/models/dagrun.py @@ -1111,7 +1111,7 @@ def notify_dagrun_state_changed(self, msg: str = ""): elif self.state == DagRunState.FAILED: get_listener_manager().hook.on_dag_run_failed(dag_run=self, msg=msg) # deliberately not notifying on QUEUED - # we can't get all the state changes on SchedulerJob, BackfillJob + # we can't get all the state changes on SchedulerJob, # or LocalTaskJob, so we don't want to "falsely advertise" we notify about that def _get_ready_tis( diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index e75ad83923bf2..5b51bb0d24df8 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -2640,7 +2640,7 @@ def _check_and_change_state_before_execution( :param mark_success: Don't run the task, mark its state as success :param test_mode: Doesn't record success or failure in the DB :param hostname: The hostname of the worker running the task instance. - :param job_id: Job (BackfillJob / LocalTaskJob / SchedulerJob) ID + :param job_id: Job (LocalTaskJob / SchedulerJob) ID :param pool: specifies the pool to use to run the task instance :param external_executor_id: The identifier of the celery executor :param session: SQLAlchemy ORM Session diff --git a/airflow/ti_deps/dependencies_deps.py b/airflow/ti_deps/dependencies_deps.py index 44d6bfc5c7db7..c167cdb346325 100644 --- a/airflow/ti_deps/dependencies_deps.py +++ b/airflow/ti_deps/dependencies_deps.py @@ -17,7 +17,6 @@ from __future__ import annotations from airflow.ti_deps.dependencies_states import ( - BACKFILL_QUEUEABLE_STATES, QUEUEABLE_STATES, RUNNABLE_STATES, ) @@ -48,13 +47,6 @@ TaskNotRunningDep(), } -BACKFILL_QUEUED_DEPS = { - RunnableExecDateDep(), - ValidStateDep(BACKFILL_QUEUEABLE_STATES), - DagrunRunningDep(), - TaskNotRunningDep(), -} - # TODO(aoen): SCHEDULER_QUEUED_DEPS is not coupled to actual scheduling/execution # in any way and could easily be modified or removed from the scheduler causing # this dependency to become outdated and incorrect. This coupling should be created diff --git a/airflow/ti_deps/dependencies_states.py b/airflow/ti_deps/dependencies_states.py index fd25d62f6d75e..ebf581ab48e18 100644 --- a/airflow/ti_deps/dependencies_states.py +++ b/airflow/ti_deps/dependencies_states.py @@ -42,12 +42,3 @@ QUEUEABLE_STATES = { TaskInstanceState.SCHEDULED, } - -BACKFILL_QUEUEABLE_STATES = { - # For cases like unit tests and run manually - None, - TaskInstanceState.UP_FOR_RESCHEDULE, - TaskInstanceState.UP_FOR_RETRY, - # For normal backfill cases - TaskInstanceState.SCHEDULED, -} diff --git a/docs/apache-airflow/administration-and-deployment/listeners.rst b/docs/apache-airflow/administration-and-deployment/listeners.rst index 1fca915a6f1df..8ca3ed93fc091 100644 --- a/docs/apache-airflow/administration-and-deployment/listeners.rst +++ b/docs/apache-airflow/administration-and-deployment/listeners.rst @@ -34,7 +34,7 @@ Lifecycle Events - ``on_starting`` - ``before_stopping`` -Lifecycle events allow you to react to start and stop events for an Airflow ``Job``, like ``SchedulerJob`` or ``BackfillJob``. +Lifecycle events allow you to react to start and stop events for an Airflow ``Job``, like ``SchedulerJob``. 
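For context on the lifecycle hooks referenced above, a minimal listener sketch; the hook names (``on_starting``, ``before_stopping``) follow the Airflow listener spec, while the plugin and class names are illustrative:

from airflow.listeners import hookimpl
from airflow.plugins_manager import AirflowPlugin

class _LifecycleListener:
    @hookimpl
    def on_starting(self, component):
        # ``component`` is the Job runner that is starting, e.g. a SchedulerJob.
        print(f"Job starting: {component.__class__.__name__}")

    @hookimpl
    def before_stopping(self, component):
        print(f"Job stopping: {component.__class__.__name__}")

class LifecycleListenerPlugin(AirflowPlugin):
    name = "lifecycle_listener_plugin"
    listeners = [_LifecycleListener()]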
DagRun State Change Events -------------------------- diff --git a/docs/apache-airflow/howto/listener-plugin.rst b/docs/apache-airflow/howto/listener-plugin.rst index 7b46a9de8a969..7d5fb430fb486 100644 --- a/docs/apache-airflow/howto/listener-plugin.rst +++ b/docs/apache-airflow/howto/listener-plugin.rst @@ -44,8 +44,8 @@ Using this plugin, following events can be listened: * dag run is in running state. * dag run is in success state. * dag run is in failure state. - * on start before event like airflow job, scheduler or backfilljob - * before stop for event like airflow job, scheduler or backfilljob + * on start before event like airflow job, scheduler + * before stop for event like airflow job, scheduler Listener Registration --------------------- diff --git a/tests/core/test_impersonation_tests.py b/tests/core/test_impersonation_tests.py index cf9359c863372..8350e95a8f4fd 100644 --- a/tests/core/test_impersonation_tests.py +++ b/tests/core/test_impersonation_tests.py @@ -28,13 +28,10 @@ import pytest from airflow.configuration import conf -from airflow.jobs.backfill_job_runner import BackfillJobRunner -from airflow.jobs.job import Job, run_job -from airflow.models import DagBag, DagRun, TaskInstance +from airflow.models import DagBag, TaskInstance from airflow.utils.db import add_default_pool_if_not_exists from airflow.utils.state import State from airflow.utils.timezone import datetime -from airflow.utils.types import DagRunType from dev.tests_common.test_utils import db @@ -175,17 +172,14 @@ def get_dagbag(dag_folder): logger.info(dagbag.dagbag_report()) return dagbag - def run_backfill(self, dag_id, task_id): + def run_dag(self, dag_id, task_id): dag = self.dagbag.get_dag(dag_id) dag.clear() - job = Job() - job_runner = BackfillJobRunner(job=job, dag=dag, start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) - run_job(job=job, execute_callable=job_runner._execute) - run_id = DagRun.generate_run_id(DagRunType.BACKFILL_JOB, execution_date=DEFAULT_DATE) - ti = TaskInstance(task=dag.get_task(task_id), run_id=run_id) - ti.refresh_from_db() + dr = dag.test(use_executor=True) + ti = TaskInstance(task=dag.get_task(task_id), run_id=dr.run_id) + ti.refresh_from_db() assert ti.state == State.SUCCESS @@ -198,14 +192,14 @@ def test_impersonation(self): """ Tests that impersonating a unix user works """ - self.run_backfill("test_impersonation", "test_impersonated_user") + self.run_dag("test_impersonation", "test_impersonated_user") def test_no_impersonation(self): """ If default_impersonation=None, tests that the job is run as the current user (which will be a sudoer) """ - self.run_backfill( + self.run_dag( "test_no_impersonation", "test_superuser", ) @@ -216,7 +210,7 @@ def test_default_impersonation(self, monkeypatch): to running as TEST_USER for a test without 'run_as_user' set. 
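The ``run_dag`` helper below replaces the old backfill invocation with ``DAG.test()``; a hedged sketch of that pattern in isolation (the dag id is illustrative):

from airflow.models import DagBag
from airflow.utils.state import DagRunState

dag = DagBag().get_dag("example_bash_operator")
dag.clear()
dr = dag.test(use_executor=True)  # runs the dag and returns the created DagRun
assert dr.state == DagRunState.SUCCESS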
""" monkeypatch.setenv("AIRFLOW__CORE__DEFAULT_IMPERSONATION", TEST_USER) - self.run_backfill("test_default_impersonation", "test_deelevated_user") + self.run_dag("test_default_impersonation", "test_deelevated_user") class TestImpersonationWithCustomPythonPath(BaseImpersonationTest): @@ -233,4 +227,4 @@ def test_impersonation_custom(self, monkeypatch): """ monkeypatch.setenv("PYTHONPATH", TEST_UTILS_FOLDER) assert TEST_UTILS_FOLDER not in sys.path - self.run_backfill("impersonation_with_custom_pkg", "exec_python_fn") + self.run_dag("impersonation_with_custom_pkg", "exec_python_fn") diff --git a/tests/jobs/test_backfill_job.py b/tests/jobs/test_backfill_job.py deleted file mode 100644 index 616f328ee4177..0000000000000 --- a/tests/jobs/test_backfill_job.py +++ /dev/null @@ -1,2094 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -import datetime -import json -import logging -import threading -from collections import defaultdict -from importlib import reload -from unittest import mock -from unittest.mock import Mock, patch - -import pytest - -from airflow import settings -from airflow.exceptions import ( - AirflowException, - BackfillUnfinished, - DagConcurrencyLimitReached, - NoAvailablePoolSlot, - TaskConcurrencyLimitReached, - UnknownExecutorException, -) -from airflow.executors.debug_executor import DebugExecutor -from airflow.executors.executor_loader import ExecutorLoader -from airflow.executors.sequential_executor import SequentialExecutor -from airflow.jobs.backfill_job_runner import BackfillJobRunner -from airflow.jobs.job import Job, run_job -from airflow.listeners.listener import get_listener_manager -from airflow.models import DagBag, Pool, TaskInstance as TI -from airflow.models.dagrun import DagRun -from airflow.models.serialized_dag import SerializedDagModel -from airflow.models.taskinstancekey import TaskInstanceKey -from airflow.models.taskmap import TaskMap -from airflow.operators.empty import EmptyOperator -from airflow.utils import timezone -from airflow.utils.session import create_session -from airflow.utils.state import DagRunState, State, TaskInstanceState -from airflow.utils.trigger_rule import TriggerRule -from airflow.utils.types import DagRunType -from tests.listeners import dag_listener -from tests.models import TEST_DAGS_FOLDER - -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import ( - clear_db_dags, - clear_db_pools, - clear_db_runs, - clear_db_xcom, - set_default_pool_slots, -) -from dev.tests_common.test_utils.mock_executor import MockExecutor - -if AIRFLOW_V_3_0_PLUS: - from airflow.utils.types import DagRunTriggeredByType - -pytestmark = [pytest.mark.db_test, 
pytest.mark.skip_if_database_isolation_mode] - -logger = logging.getLogger(__name__) - -DEFAULT_DATE = timezone.datetime(2016, 1, 1) -DEFAULT_DAG_RUN_ID = "test1" - - -@pytest.fixture(scope="module") -def dag_bag(): - return DagBag(include_examples=True) - - -class SecondaryMockExecutor(MockExecutor): - """Copy of MockExecutor class with a new name for testing with hybrid executors (which currently - disallows using the same executor concurrently)""" - - -def _mock_executor(executor=None): - if not executor: - default_executor = MockExecutor() - else: - if isinstance(executor, type): - default_executor = executor() - else: - default_executor = executor - - default_executor.name = mock.MagicMock( - alias="default_exec", - module_path=f"{default_executor.__module__}.{default_executor.__class__.__qualname__}", - ) - with mock.patch("airflow.jobs.job.Job.executors", new_callable=mock.PropertyMock) as executors_mock: - with mock.patch("airflow.jobs.job.Job.executor", new_callable=mock.PropertyMock) as executor_mock: - with mock.patch("airflow.executors.executor_loader.ExecutorLoader.load_executor") as loader_mock: - executor_mock.return_value = default_executor - executors_mock.return_value = [default_executor] - # The executor is mocked, so cannot be loaded/imported. Mock load_executor and return the - # correct object for the given input executor name. - loader_mock.side_effect = lambda *x: { - ("default_exec",): default_executor, - ("default.exec.module.path",): default_executor, - (None,): default_executor, - }[x] - - # Reload the job runner so that it gets a fresh instances of the mocked executor loader - from airflow.jobs import backfill_job_runner - - reload(backfill_job_runner) - - yield default_executor - - -@pytest.mark.execution_timeout(120) -class TestBackfillJob: - @pytest.fixture - def mock_executor(self): - yield from _mock_executor() - - def _mock_executors(self): - default_executor = MockExecutor() - _default_executor = Mock(wraps=default_executor) - default_alias = "default_exec" - default_module_path = f"{default_executor.__module__}.{default_executor.__class__.__qualname__}" - _default_executor.name = mock.MagicMock(alias=default_alias, module_path=default_module_path) - - secondary_executor = SecondaryMockExecutor() - _secondary_executor = Mock(wraps=secondary_executor) - secondary_alias = "secondary_exec" - secondary_module_path = f"{secondary_executor.__module__}.{secondary_executor.__class__.__qualname__}" - _secondary_executor.name = mock.MagicMock(alias=secondary_alias, module_path=secondary_module_path) - - with mock.patch( - "airflow.jobs.job.Job.executors", new_callable=mock.PropertyMock - ) as executors_mock, mock.patch( - "airflow.jobs.job.Job.executor", new_callable=mock.PropertyMock - ) as executor_mock, mock.patch( - "airflow.executors.executor_loader.ExecutorLoader.load_executor" - ) as loader_mock, conf_vars( - { - ( - "core", - "executor", - ): f"{default_alias}:{default_module_path},{secondary_alias}:{secondary_module_path}" - } - ): - # The executor is mocked, so cannot be loaded/imported. Mock load_executor and return the - # correct object for the given input executor name. 
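# Hedged aside on the mocking pattern above: ``Job.executor`` and
# ``Job.executors`` are properties, so they are patched with
# ``new_callable=mock.PropertyMock``; a default MagicMock patch would replace
# the property object itself, and attribute access would hand back the mock
# rather than invoke a getter. Minimal standalone shape (``my_executor`` is
# illustrative):
#
#     with mock.patch("airflow.jobs.job.Job.executor",
#                     new_callable=mock.PropertyMock) as exec_prop:
#         exec_prop.return_value = my_executor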
- loader_mock.side_effect = lambda *x: { - (_secondary_executor.name.alias,): _secondary_executor, - (_secondary_executor.name.module_path,): _secondary_executor, - (default_alias,): _default_executor, - (default_module_path,): _default_executor, - (None,): _default_executor, - }[x] - - executor_mock.return_value = _default_executor - executors_mock.return_value = [_default_executor, _secondary_executor] - - yield (_default_executor, _secondary_executor) - - @pytest.fixture - def mock_executors(self): - yield from self._mock_executors() - - @staticmethod - def clean_db(): - clear_db_dags() - clear_db_runs() - clear_db_xcom() - clear_db_pools() - - @pytest.fixture(autouse=True) - def set_instance_attrs(self, dag_bag): - self.clean_db() - self.dagbag = dag_bag - # `airflow tasks run` relies on serialized_dag - for dag in self.dagbag.dags.values(): - SerializedDagModel.write_dag(dag) - - def _get_dummy_dag( - self, - dag_maker_fixture, - dag_id="test_dag", - pool=Pool.DEFAULT_POOL_NAME, - max_active_tis_per_dag=None, - task_id="op", - **kwargs, - ): - with dag_maker_fixture(dag_id=dag_id, schedule="@daily", **kwargs) as dag: - EmptyOperator(task_id=task_id, pool=pool, max_active_tis_per_dag=max_active_tis_per_dag) - - return dag - - def _times_called_with(self, method, class_): - count = 0 - for args in method.call_args_list: - if isinstance(args[0][0], class_): - count += 1 - return count - - def test_unfinished_dag_runs_set_to_failed(self, dag_maker): - dag = self._get_dummy_dag(dag_maker) - dag_run = dag_maker.create_dagrun(state=None) - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=8), - ignore_first_depends_on_past=True, - ) - - job_runner._set_unfinished_dag_runs_to_failed([dag_run]) - dag_run.refresh_from_db() - - assert State.FAILED == dag_run.state - - def test_dag_run_with_finished_tasks_set_to_success(self, dag_maker, mock_executor): - dag = self._get_dummy_dag(dag_maker) - dag_run = dag_maker.create_dagrun(state=None) - - for ti in dag_run.get_task_instances(): - ti.set_state(State.SUCCESS) - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=8), - ignore_first_depends_on_past=True, - ) - job_runner._set_unfinished_dag_runs_to_failed([dag_run]) - - dag_run.refresh_from_db() - - assert State.SUCCESS == dag_run.state - - @pytest.mark.backend("postgres", "mysql") - def test_trigger_controller_dag(self, session): - dag = self.dagbag.get_dag("example_trigger_controller_dag") - target_dag = self.dagbag.get_dag("example_trigger_target_dag") - target_dag.sync_to_db() - - target_dag_run = session.query(DagRun).filter(DagRun.dag_id == target_dag.dag_id).one_or_none() - assert target_dag_run is None - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE, - ignore_first_depends_on_past=True, - ) - - run_job(job=job, execute_callable=job_runner._execute) - - dag_run = session.query(DagRun).filter(DagRun.dag_id == dag.dag_id).one_or_none() - assert dag_run is not None - - task_instances_list = job_runner._task_instances_for_dag_run(dag=dag, dag_run=dag_run) - - assert task_instances_list - - @pytest.mark.backend("postgres", "mysql") - def test_backfill_multi_dates(self, mock_executor): - dag = self.dagbag.get_dag("miscellaneous_test_dag") - - end_date = DEFAULT_DATE + datetime.timedelta(days=1) - - job = Job() - executor = 
job.executor - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=end_date, - ignore_first_depends_on_past=True, - ) - - run_job(job=job, execute_callable=job_runner._execute) - expected_execution_order = [ - ("runme_0", DEFAULT_DATE), - ("runme_1", DEFAULT_DATE), - ("runme_2", DEFAULT_DATE), - ("runme_0", end_date), - ("runme_1", end_date), - ("runme_2", end_date), - ("also_run_this", DEFAULT_DATE), - ("also_run_this", end_date), - ("run_after_loop", DEFAULT_DATE), - ("run_after_loop", end_date), - ("run_this_last", DEFAULT_DATE), - ("run_this_last", end_date), - ] - actual = [(tuple(x), y) for x, y in executor.sorted_tasks] - expected = [ - ( - (dag.dag_id, task_id, f"backfill__{when.isoformat()}", 1, -1), - (State.SUCCESS, None), - ) - for (task_id, when) in expected_execution_order - ] - assert actual == expected - session = settings.Session() - drs = session.query(DagRun).filter(DagRun.dag_id == dag.dag_id).order_by(DagRun.execution_date).all() - - assert drs[0].execution_date == DEFAULT_DATE - assert drs[0].state == State.SUCCESS - assert drs[1].execution_date == DEFAULT_DATE + datetime.timedelta(days=1) - assert drs[1].state == State.SUCCESS - - dag.clear() - session.close() - - @pytest.mark.backend("postgres", "mysql") - @pytest.mark.parametrize( - "dag_id, expected_execution_order", - [ - [ - "example_branch_operator", - ( - "run_this_first", - "branching", - "branch_a", - "branch_b", - "branch_c", - "branch_d", - "follow_a", - "follow_b", - "follow_c", - "follow_d", - "join", - "branching_ext_python", - "ext_py_a", - "ext_py_b", - "ext_py_c", - "ext_py_d", - "join_ext_python", - "branching_venv", - "venv_a", - "venv_b", - "venv_c", - "venv_d", - "join_venv", - ), - ], - [ - "miscellaneous_test_dag", - ("runme_0", "runme_1", "runme_2", "also_run_this", "run_after_loop", "run_this_last"), - ], - [ - "example_skip_dag", - ( - "always_true_1", - "always_true_2", - "skip_operator_1", - "skip_operator_2", - "all_success", - "one_success", - "final_1", - "final_2", - ), - ], - ["latest_only", ("latest_only", "task1")], - ], - ) - def test_backfill_examples(self, dag_id, expected_execution_order, mock_executor): - """ - Test backfilling example dags - - Try to backfill some of the example dags. Be careful, not all dags are suitable - for doing this. For example, a dag that sleeps forever, or does not have a - schedule won't work here since you simply can't backfill them. 
- """ - dag = self.dagbag.get_dag(dag_id) - - logger.info("*** Running example DAG: %s", dag.dag_id) - job = Job() - executor = job.executor - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE, - ignore_first_depends_on_past=True, - ) - - run_job(job=job, execute_callable=job_runner._execute) - assert [ - ((dag_id, task_id, f"backfill__{DEFAULT_DATE.isoformat()}", 1, -1), (State.SUCCESS, None)) - for task_id in expected_execution_order - ] == executor.sorted_tasks - - def test_backfill_conf(self, dag_maker, mock_executor): - dag = self._get_dummy_dag(dag_maker, dag_id="test_backfill_conf") - dag_maker.create_dagrun(state=None) - - conf_ = json.loads("""{"key": "value"}""") - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=2), - conf=conf_, - ) - run_job(job=job, execute_callable=job_runner._execute) - - # We ignore the first dag_run created by fixture - dr = DagRun.find( - dag_id="test_backfill_conf", execution_start_date=DEFAULT_DATE + datetime.timedelta(days=1) - ) - - assert conf_ == dr[0].conf - - def test_backfill_respect_max_active_tis_per_dag_limit(self, dag_maker, mock_executor): - max_active_tis_per_dag = 2 - dag = self._get_dummy_dag( - dag_maker, - dag_id="test_backfill_respect_max_active_tis_per_dag_limit", - max_active_tis_per_dag=max_active_tis_per_dag, - ) - dag_maker.create_dagrun(state=None) - - job = Job() - executor = job.executor - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=7), - ) - - mock_log = Mock() - job_runner._log = mock_log - - run_job(job=job, execute_callable=job_runner._execute) - - assert len(executor.history) > 0 - - task_concurrency_limit_reached_at_least_once = False - - num_running_task_instances = 0 - for running_task_instances in executor.history: - assert len(running_task_instances) <= max_active_tis_per_dag - num_running_task_instances += len(running_task_instances) - if len(running_task_instances) == max_active_tis_per_dag: - task_concurrency_limit_reached_at_least_once = True - - assert 8 == num_running_task_instances - assert task_concurrency_limit_reached_at_least_once - - times_dag_concurrency_limit_reached_in_debug = self._times_called_with( - mock_log.debug, - DagConcurrencyLimitReached, - ) - - times_pool_limit_reached_in_debug = self._times_called_with( - mock_log.debug, - NoAvailablePoolSlot, - ) - - times_task_concurrency_limit_reached_in_debug = self._times_called_with( - mock_log.debug, - TaskConcurrencyLimitReached, - ) - - assert 0 == times_pool_limit_reached_in_debug - assert 0 == times_dag_concurrency_limit_reached_in_debug - assert times_task_concurrency_limit_reached_in_debug > 0 - - @pytest.mark.parametrize("with_max_active_tis_per_dag", [False, True]) - def test_backfill_respect_max_active_tis_per_dagrun_limit( - self, dag_maker, with_max_active_tis_per_dag, mock_executor - ): - max_active_tis_per_dag = 3 - max_active_tis_per_dagrun = 2 - kwargs = {"max_active_tis_per_dagrun": max_active_tis_per_dagrun} - if with_max_active_tis_per_dag: - kwargs["max_active_tis_per_dag"] = max_active_tis_per_dag - - with dag_maker(dag_id="test_backfill_respect_max_active_tis_per_dag_limit", schedule="@daily") as dag: - EmptyOperator.partial(task_id="task1", **kwargs).expand_kwargs([{"x": i} for i in range(10)]) - - dag_maker.create_dagrun(state=None) - - job = Job() - executor = job.executor - job_runner = 
BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=7), - ) - - mock_log = Mock() - job_runner._log = mock_log - - run_job(job=job, execute_callable=job_runner._execute) - - assert len(executor.history) > 0 - - task_concurrency_limit_reached_at_least_once = False - - def get_running_tis_per_dagrun(running_tis): - running_tis_per_dagrun_dict = defaultdict(int) - for running_ti in running_tis: - running_tis_per_dagrun_dict[running_ti[3].dag_run.id] += 1 - return running_tis_per_dagrun_dict - - num_running_task_instances = 0 - for running_task_instances in executor.history: - if with_max_active_tis_per_dag: - assert len(running_task_instances) <= max_active_tis_per_dag - running_tis_per_dagrun_dict = get_running_tis_per_dagrun(running_task_instances) - assert all( - [ - num_running_tis <= max_active_tis_per_dagrun - for num_running_tis in running_tis_per_dagrun_dict.values() - ] - ) - num_running_task_instances += len(running_task_instances) - task_concurrency_limit_reached_at_least_once = ( - task_concurrency_limit_reached_at_least_once - or any( - [ - num_running_tis == max_active_tis_per_dagrun - for num_running_tis in running_tis_per_dagrun_dict.values() - ] - ) - ) - - assert 80 == num_running_task_instances # (7 backfill run + 1 manual run ) * 10 mapped task per run - assert task_concurrency_limit_reached_at_least_once - - times_dag_concurrency_limit_reached_in_debug = self._times_called_with( - mock_log.debug, - DagConcurrencyLimitReached, - ) - - times_pool_limit_reached_in_debug = self._times_called_with( - mock_log.debug, - NoAvailablePoolSlot, - ) - - times_task_concurrency_limit_reached_in_debug = self._times_called_with( - mock_log.debug, - TaskConcurrencyLimitReached, - ) - - assert 0 == times_pool_limit_reached_in_debug - assert 0 == times_dag_concurrency_limit_reached_in_debug - assert times_task_concurrency_limit_reached_in_debug > 0 - - def test_backfill_respect_dag_concurrency_limit(self, dag_maker, mock_executor): - dag = self._get_dummy_dag(dag_maker, dag_id="test_backfill_respect_concurrency_limit") - dag_maker.create_dagrun(state=None) - dag.max_active_tasks = 2 - - job = Job() - executor = job.executor - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=7), - ) - - mock_log = Mock() - job_runner._log = mock_log - - run_job(job=job, execute_callable=job_runner._execute) - - assert len(executor.history) > 0 - - concurrency_limit_reached_at_least_once = False - - num_running_task_instances = 0 - - for running_task_instances in executor.history: - assert len(running_task_instances) <= dag.max_active_tasks - num_running_task_instances += len(running_task_instances) - if len(running_task_instances) == dag.max_active_tasks: - concurrency_limit_reached_at_least_once = True - - assert 8 == num_running_task_instances - assert concurrency_limit_reached_at_least_once - - times_dag_concurrency_limit_reached_in_debug = self._times_called_with( - mock_log.debug, - DagConcurrencyLimitReached, - ) - - times_pool_limit_reached_in_debug = self._times_called_with( - mock_log.debug, - NoAvailablePoolSlot, - ) - - times_task_concurrency_limit_reached_in_debug = self._times_called_with( - mock_log.debug, - TaskConcurrencyLimitReached, - ) - - assert 0 == times_pool_limit_reached_in_debug - assert 0 == times_task_concurrency_limit_reached_in_debug - assert times_dag_concurrency_limit_reached_in_debug > 0 - - def 
test_backfill_respect_default_pool_limit(self, dag_maker, mock_executor): - default_pool_slots = 2 - set_default_pool_slots(default_pool_slots) - - dag = self._get_dummy_dag(dag_maker, dag_id="test_backfill_with_no_pool_limit") - dag_maker.create_dagrun(state=None) - - job = Job() - executor = job.executor - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=7), - ) - - mock_log = Mock() - job_runner._log = mock_log - - run_job(job=job, execute_callable=job_runner._execute) - - assert len(executor.history) > 0 - - default_pool_task_slot_count_reached_at_least_once = False - - num_running_task_instances = 0 - - # if no pool is specified, the number of tasks running in - # parallel per backfill should be less than - # default_pool slots at any point of time. - for running_task_instances in executor.history: - assert len(running_task_instances) <= default_pool_slots - num_running_task_instances += len(running_task_instances) - if len(running_task_instances) == default_pool_slots: - default_pool_task_slot_count_reached_at_least_once = True - - assert 8 == num_running_task_instances - assert default_pool_task_slot_count_reached_at_least_once - - times_dag_concurrency_limit_reached_in_debug = self._times_called_with( - mock_log.debug, - DagConcurrencyLimitReached, - ) - - times_pool_limit_reached_in_debug = self._times_called_with( - mock_log.debug, - NoAvailablePoolSlot, - ) - - times_task_concurrency_limit_reached_in_debug = self._times_called_with( - mock_log.debug, - TaskConcurrencyLimitReached, - ) - - assert 0 == times_dag_concurrency_limit_reached_in_debug - assert 0 == times_task_concurrency_limit_reached_in_debug - assert times_pool_limit_reached_in_debug > 0 - - def test_backfill_pool_not_found(self, dag_maker, mock_executor): - dag = self._get_dummy_dag( - dag_maker, - dag_id="test_backfill_pool_not_found", - pool="king_pool", - ) - dag_maker.create_dagrun(state=None) - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=7), - ) - - try: - run_job(job=job, execute_callable=job_runner._execute) - except AirflowException: - return - - def test_backfill_respect_pool_limit(self, dag_maker, mock_executor): - session = settings.Session() - - slots = 2 - pool = Pool( - pool="pool_with_two_slots", - slots=slots, - include_deferred=False, - ) - session.add(pool) - session.commit() - - dag = self._get_dummy_dag( - dag_maker, - dag_id="test_backfill_respect_pool_limit", - pool=pool.pool, - ) - dag_maker.create_dagrun(state=None) - - job = Job() - executor = job.executor - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=7), - ) - - mock_log = Mock() - job_runner._log = mock_log - - run_job(job=job, execute_callable=job_runner._execute) - - assert len(executor.history) > 0 - - pool_was_full_at_least_once = False - num_running_task_instances = 0 - - for running_task_instances in executor.history: - assert len(running_task_instances) <= slots - num_running_task_instances += len(running_task_instances) - if len(running_task_instances) == slots: - pool_was_full_at_least_once = True - - assert 8 == num_running_task_instances - assert pool_was_full_at_least_once - - times_dag_concurrency_limit_reached_in_debug = self._times_called_with( - mock_log.debug, - DagConcurrencyLimitReached, - ) - - times_pool_limit_reached_in_debug = 
self._times_called_with( - mock_log.debug, - NoAvailablePoolSlot, - ) - - times_task_concurrency_limit_reached_in_debug = self._times_called_with( - mock_log.debug, - TaskConcurrencyLimitReached, - ) - - assert 0 == times_task_concurrency_limit_reached_in_debug - assert 0 == times_dag_concurrency_limit_reached_in_debug - assert times_pool_limit_reached_in_debug > 0 - - def test_backfill_run_rescheduled(self, dag_maker, mock_executor): - dag = self._get_dummy_dag( - dag_maker, dag_id="test_backfill_run_rescheduled", task_id="test_backfill_run_rescheduled_task-1" - ) - dag_maker.create_dagrun(state=None, run_id=DEFAULT_DAG_RUN_ID) - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=2), - ) - run_job(job=job, execute_callable=job_runner._execute) - - ti = TI(task=dag.get_task("test_backfill_run_rescheduled_task-1"), run_id=DEFAULT_DAG_RUN_ID) - ti.refresh_from_db() - ti.set_state(State.UP_FOR_RESCHEDULE) - - for _ in _mock_executor(): - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=2), - rerun_failed_tasks=True, - ) - run_job(job=job, execute_callable=job_runner._execute) - ti = TI(task=dag.get_task("test_backfill_run_rescheduled_task-1"), run_id=DEFAULT_DAG_RUN_ID) - ti.refresh_from_db() - assert ti.state == State.SUCCESS - - def test_backfill_override_conf(self, dag_maker, mock_executor): - dag = self._get_dummy_dag( - dag_maker, dag_id="test_backfill_override_conf", task_id="test_backfill_override_conf-1" - ) - dr = dag_maker.create_dagrun( - state=None, - start_date=DEFAULT_DATE, - ) - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=2), - conf={"a": 1}, - ) - - with patch.object( - job_runner, - "_task_instances_for_dag_run", - wraps=job_runner._task_instances_for_dag_run, - ) as wrapped_task_instances_for_dag_run: - run_job(job=job, execute_callable=job_runner._execute) - dr = wrapped_task_instances_for_dag_run.call_args_list[0][0][1] - assert dr.conf == {"a": 1} - - def test_backfill_skip_active_scheduled_dagrun(self, dag_maker, caplog, mock_executor): - dag = self._get_dummy_dag( - dag_maker, - dag_id="test_backfill_skip_active_scheduled_dagrun", - task_id="test_backfill_skip_active_scheduled_dagrun-1", - ) - dag_maker.create_dagrun(run_type=DagRunType.SCHEDULED, state=State.RUNNING, run_id=DEFAULT_DAG_RUN_ID) - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=2), - ) - with caplog.at_level(logging.ERROR, logger="airflow.jobs.backfill_job_runner.BackfillJob"): - caplog.clear() - run_job(job=job, execute_callable=job_runner._execute) - assert "Backfill cannot be created for DagRun" in caplog.messages[0] - - ti = TI(task=dag.get_task("test_backfill_skip_active_scheduled_dagrun-1"), run_id=DEFAULT_DAG_RUN_ID) - ti.refresh_from_db() - # since DAG backfill is skipped, task state should be none - assert ti.state == State.NONE - - def test_backfill_rerun_failed_tasks(self, dag_maker, mock_executor): - dag = self._get_dummy_dag( - dag_maker, dag_id="test_backfill_rerun_failed", task_id="test_backfill_rerun_failed_task-1" - ) - dag_maker.create_dagrun(state=None, run_id=DEFAULT_DAG_RUN_ID) - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - 
end_date=DEFAULT_DATE + datetime.timedelta(days=2), - ) - run_job(job=job, execute_callable=job_runner._execute) - - ti = TI(task=dag.get_task("test_backfill_rerun_failed_task-1"), run_id=DEFAULT_DAG_RUN_ID) - ti.refresh_from_db() - ti.set_state(State.FAILED) - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=2), - rerun_failed_tasks=True, - ) - run_job(job=job, execute_callable=job_runner._execute) - ti = TI(task=dag.get_task("test_backfill_rerun_failed_task-1"), run_id=DEFAULT_DAG_RUN_ID) - ti.refresh_from_db() - assert ti.state == State.SUCCESS - - def test_backfill_rerun_upstream_failed_tasks(self, dag_maker, mock_executor): - with dag_maker(dag_id="test_backfill_rerun_upstream_failed", schedule="@daily") as dag: - op1 = EmptyOperator(task_id="test_backfill_rerun_upstream_failed_task-1") - op2 = EmptyOperator(task_id="test_backfill_rerun_upstream_failed_task-2") - op1.set_upstream(op2) - dag_maker.create_dagrun(state=None, run_id=DEFAULT_DAG_RUN_ID) - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=2), - ) - run_job(job=job, execute_callable=job_runner._execute) - - ti = TI(task=dag.get_task("test_backfill_rerun_upstream_failed_task-1"), run_id=DEFAULT_DAG_RUN_ID) - ti.refresh_from_db() - ti.set_state(State.UPSTREAM_FAILED) - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=2), - rerun_failed_tasks=True, - ) - run_job(job=job, execute_callable=job_runner._execute) - ti = TI(task=dag.get_task("test_backfill_rerun_upstream_failed_task-1"), run_id=DEFAULT_DAG_RUN_ID) - ti.refresh_from_db() - assert ti.state == State.SUCCESS - - def test_backfill_rerun_failed_tasks_without_flag(self, dag_maker, mock_executor): - dag = self._get_dummy_dag( - dag_maker, dag_id="test_backfill_rerun_failed", task_id="test_backfill_rerun_failed_task-1" - ) - dag_maker.create_dagrun(state=None, run_id=DEFAULT_DAG_RUN_ID) - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=2), - ) - run_job(job=job, execute_callable=job_runner._execute) - - ti = TI(task=dag.get_task("test_backfill_rerun_failed_task-1"), run_id=DEFAULT_DAG_RUN_ID) - ti.refresh_from_db() - ti.set_state(State.FAILED) - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=2), - rerun_failed_tasks=False, - ) - - with pytest.raises(AirflowException): - run_job(job=job, execute_callable=job_runner._execute) - - def test_backfill_retry_intermittent_failed_task(self, dag_maker, mock_executor): - with dag_maker( - dag_id="test_intermittent_failure_job", - schedule="@daily", - default_args={ - "retries": 2, - "retry_delay": datetime.timedelta(seconds=0), - }, - ) as dag: - task1 = EmptyOperator(task_id="task1") - dag_maker.create_dagrun(state=None) - - executor = mock_executor - executor.mock_task_results[TaskInstanceKey(dag.dag_id, task1.task_id, DEFAULT_DATE, try_number=1)] = ( - State.UP_FOR_RETRY - ) - executor.mock_task_results[TaskInstanceKey(dag.dag_id, task1.task_id, DEFAULT_DATE, try_number=2)] = ( - State.UP_FOR_RETRY - ) - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=2), 
- ) - run_job(job=job, execute_callable=job_runner._execute) - - def test_backfill_retry_always_failed_task(self, dag_maker, mock_executor): - with dag_maker( - dag_id="test_always_failure_job", - schedule="@daily", - default_args={ - "retries": 1, - "retry_delay": datetime.timedelta(seconds=0), - }, - ) as dag: - task1 = EmptyOperator(task_id="task1") - dr = dag_maker.create_dagrun(state=None) - - executor = mock_executor - executor.mock_task_results[TaskInstanceKey(dag.dag_id, task1.task_id, dr.run_id, try_number=0)] = ( - State.UP_FOR_RETRY - ) - executor.mock_task_fail(dag.dag_id, task1.task_id, dr.run_id, try_number=1) - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE, - ) - with pytest.raises(BackfillUnfinished): - run_job(job=job, execute_callable=job_runner._execute) - - def test_backfill_ordered_concurrent_execute(self, dag_maker, mock_executor): - with dag_maker( - dag_id="test_backfill_ordered_concurrent_execute", - schedule="@daily", - ) as dag: - op1 = EmptyOperator(task_id="leave1") - op2 = EmptyOperator(task_id="leave2") - op3 = EmptyOperator(task_id="upstream_level_1") - op4 = EmptyOperator(task_id="upstream_level_2") - op5 = EmptyOperator(task_id="upstream_level_3") - # order randomly - op2.set_downstream(op3) - op1.set_downstream(op3) - op4.set_downstream(op5) - op3.set_downstream(op4) - runid0 = f"backfill__{DEFAULT_DATE.isoformat()}" - dag_maker.create_dagrun(run_id=runid0) - - executor = mock_executor - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=2), - ) - run_job(job=job, execute_callable=job_runner._execute) - - runid1 = f"backfill__{(DEFAULT_DATE + datetime.timedelta(days=1)).isoformat()}" - runid2 = f"backfill__{(DEFAULT_DATE + datetime.timedelta(days=2)).isoformat()}" - - actual = [] - for batch in executor.history: - this_batch = [] - for cmd, idx, queue, ti in batch: # noqa: B007 - key = ti.key - this_batch.append((key.task_id, key.run_id)) - actual.append(sorted(this_batch)) - assert actual == [ - [ - ("leave1", runid0), - ("leave1", runid1), - ("leave1", runid2), - ("leave2", runid0), - ("leave2", runid1), - ("leave2", runid2), - ], - [ - ("upstream_level_1", runid0), - ("upstream_level_1", runid1), - ("upstream_level_1", runid2), - ], - [ - ("upstream_level_2", runid0), - ("upstream_level_2", runid1), - ("upstream_level_2", runid2), - ], - [ - ("upstream_level_3", runid0), - ("upstream_level_3", runid1), - ("upstream_level_3", runid2), - ], - ] - - @pytest.mark.parametrize("ignore_depends_on_past", [True, False]) - def test_backfill_depends_on_past_works_independently_on_ignore_depends_on_past( - self, ignore_depends_on_past, mock_executor - ): - dag = self.dagbag.get_dag("test_depends_on_past") - dag.clear() - run_date = DEFAULT_DATE + datetime.timedelta(days=5) - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=run_date, - end_date=run_date, - ignore_first_depends_on_past=ignore_depends_on_past, - ) - run_job(job=job, execute_callable=job_runner._execute) - - run_id = f"backfill__{run_date.isoformat()}" - # ti should have succeeded - ti = TI(dag.tasks[0], run_id=run_id) - ti.refresh_from_db() - assert ti.state == State.SUCCESS - - def test_backfill_depends_on_past_backwards(self, mock_executor): - """ - Test that CLI respects -B argument and raises on interaction with depends_on_past - """ - dag_id = "test_depends_on_past" - start_date = DEFAULT_DATE + 
datetime.timedelta(days=1) - end_date = start_date + datetime.timedelta(days=1) - kwargs = dict( - start_date=start_date, - end_date=end_date, - ) - dag = self.dagbag.get_dag(dag_id) - dag.clear() - - job = Job() - job_runner = BackfillJobRunner(job=job, dag=dag, ignore_first_depends_on_past=True, **kwargs) - run_job(job=job, execute_callable=job_runner._execute) - - run_id = f"backfill__{end_date.isoformat()}" - ti = TI(dag.get_task("test_dop_task"), run_id=run_id) - ti.refresh_from_db() - # runs fine forwards - assert ti.state == State.SUCCESS - - # raises backwards - expected_msg = "You cannot backfill backwards because one or more tasks depend_on_past: test_dop_task" - - for _ in _mock_executor(): - # Mock again to get a new executor - job = Job() - job_runner = BackfillJobRunner(job=job, dag=dag, run_backwards=True, **kwargs) - with pytest.raises(AirflowException, match=expected_msg): - run_job(job=job, execute_callable=job_runner._execute) - - def _get_dag_test_max_active_limits( - self, dag_maker_fixture, dag_id="test_dag", max_active_runs=1, **kwargs - ): - with dag_maker_fixture( - dag_id=dag_id, - schedule="@hourly", - max_active_runs=max_active_runs, - **kwargs, - ) as dag: - op1 = EmptyOperator(task_id="leave1") - op2 = EmptyOperator(task_id="leave2") - op3 = EmptyOperator(task_id="upstream_level_1") - op4 = EmptyOperator(task_id="upstream_level_2") - - op1 >> op2 >> op3 - op4 >> op3 - return dag - - def test_backfill_max_limit_check_within_limit(self, dag_maker, mock_executor): - dag = self._get_dag_test_max_active_limits( - dag_maker, dag_id="test_backfill_max_limit_check_within_limit", max_active_runs=16 - ) - dag_maker.create_dagrun(state=None) - start_date = DEFAULT_DATE - datetime.timedelta(hours=1) - end_date = DEFAULT_DATE - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=start_date, - end_date=end_date, - donot_pickle=True, - ) - run_job(job=job, execute_callable=job_runner._execute) - - dagruns = DagRun.find(dag_id=dag.dag_id) - assert 2 == len(dagruns) - assert all(run.state == State.SUCCESS for run in dagruns) - - def test_backfill_notifies_dagrun_listener(self, dag_maker, mock_executor): - dag = self._get_dummy_dag(dag_maker) - dag_run = dag_maker.create_dagrun(state=None) - dag_listener.clear() - get_listener_manager().add_listener(dag_listener) - - start_date = DEFAULT_DATE - datetime.timedelta(hours=1) - end_date = DEFAULT_DATE - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=start_date, - end_date=end_date, - donot_pickle=True, - ) - job.notification_threadpool = mock.MagicMock() - run_job(job=job, execute_callable=job_runner._execute) - - assert len(dag_listener.running) == 1 - assert len(dag_listener.success) == 1 - assert dag_listener.running[0].dag.dag_id == dag_run.dag.dag_id - assert dag_listener.running[0].run_id == dag_run.run_id - assert dag_listener.running[0].state == DagRunState.RUNNING - - assert dag_listener.success[0].dag.dag_id == dag_run.dag.dag_id - assert dag_listener.success[0].run_id == dag_run.run_id - assert dag_listener.success[0].state == DagRunState.SUCCESS - - def test_backfill_max_limit_check(self, dag_maker, mock_executor): - dag_id = "test_backfill_max_limit_check" - run_id = "test_dag_run" - start_date = DEFAULT_DATE - datetime.timedelta(hours=1) - end_date = DEFAULT_DATE - - dag_run_created_cond = threading.Condition() - - def run_backfill(cond): - cond.acquire() - # this session object is different than the one in the main thread - with create_session() as 
thread_session: - try: - dag = self._get_dag_test_max_active_limits( - dag_maker, - dag_id=dag_id, - ) - dag_maker.create_dagrun( - state=State.RUNNING, - # Existing dagrun that is not within the backfill range - run_id=run_id, - execution_date=DEFAULT_DATE + datetime.timedelta(hours=1), - ) - thread_session.commit() - cond.notify() - except Exception: - logger.exception("Exception when creating DagRun") - finally: - cond.release() - thread_session.close() - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=start_date, - end_date=end_date, - donot_pickle=True, - ) - run_job(job=job, execute_callable=job_runner._execute) - - backfill_job_thread = threading.Thread( - target=run_backfill, name="run_backfill", args=(dag_run_created_cond,) - ) - - dag_run_created_cond.acquire() - with create_session() as session: - backfill_job_thread.start() - try: - # at this point backfill can't run since the max_active_runs has been - # reached, so it is waiting - dag_run_created_cond.wait(timeout=1.5) - dagruns = DagRun.find(dag_id=dag_id) - logger.info("The dag runs retrieved: %s", dagruns) - assert 1 == len(dagruns) - dr = dagruns[0] - assert dr.run_id == run_id - - # allow the backfill to execute - # by setting the existing dag run to SUCCESS, - # backfill will execute dag runs 1 by 1 - dr.set_state(State.SUCCESS) - session.merge(dr) - session.commit() - - backfill_job_thread.join() - - dagruns = DagRun.find(dag_id=dag_id) - assert 3 == len(dagruns) # 2 from backfill + 1 existing - assert dagruns[-1].run_id == dr.run_id - finally: - dag_run_created_cond.release() - - def test_backfill_max_limit_check_no_count_existing(self, dag_maker, mock_executor): - start_date = DEFAULT_DATE - end_date = DEFAULT_DATE - # Existing dagrun that is within the backfill range - dag = self._get_dag_test_max_active_limits( - dag_maker, dag_id="test_backfill_max_limit_check_no_count_existing" - ) - dag_maker.create_dagrun(state=None) - - job = Job() - job_runner = BackfillJobRunner( - job=job, dag=dag, start_date=start_date, end_date=end_date, donot_pickle=True - ) - run_job(job=job, execute_callable=job_runner._execute) - - # BackfillJobRunner will run since the existing DagRun does not count for the max - # active limit since it's within the backfill date range. 
- dagruns = DagRun.find(dag_id=dag.dag_id) - # will only be able to run 1 (the existing one) since there's just - # one dag run slot left given the max_active_runs limit - assert 1 == len(dagruns) - assert State.SUCCESS == dagruns[0].state - - def test_backfill_max_limit_check_complete_loop(self, dag_maker, mock_executor): - dag = self._get_dag_test_max_active_limits( - dag_maker, dag_id="test_backfill_max_limit_check_complete_loop" - ) - dag_maker.create_dagrun(state=None) - start_date = DEFAULT_DATE - datetime.timedelta(hours=1) - end_date = DEFAULT_DATE - - # Given the max limit to be 1 in active dag runs, we need to run the - # backfill job 3 times - success_expected = 2 - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=start_date, - end_date=end_date, - donot_pickle=True, - ) - run_job(job=job, execute_callable=job_runner._execute) - - success_dagruns = len(DagRun.find(dag_id=dag.dag_id, state=State.SUCCESS)) - running_dagruns = len(DagRun.find(dag_id=dag.dag_id, state=State.RUNNING)) - assert success_expected == success_dagruns - assert 0 == running_dagruns # no dag_runs in running state are left - - def test_sub_set_subdag(self, dag_maker, mock_executor): - with dag_maker( - "test_sub_set_subdag", - on_success_callback=lambda _: None, - on_failure_callback=lambda _: None, - ) as dag: - op1 = EmptyOperator(task_id="leave1") - op2 = EmptyOperator(task_id="leave2") - op3 = EmptyOperator(task_id="upstream_level_1") - op4 = EmptyOperator(task_id="upstream_level_2") - op5 = EmptyOperator(task_id="upstream_level_3") - # order randomly - op2.set_downstream(op3) - op1.set_downstream(op3) - op4.set_downstream(op5) - op3.set_downstream(op4) - - dr = dag_maker.create_dagrun(state=None) - - sub_dag = dag.partial_subset( - task_ids_or_regex="leave*", include_downstream=False, include_upstream=False - ) - job = Job() - job_runner = BackfillJobRunner(job=job, dag=sub_dag, start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) - run_job(job=job, execute_callable=job_runner._execute) - - for ti in dr.get_task_instances(): - if ti.task_id == "leave1" or ti.task_id == "leave2": - assert State.SUCCESS == ti.state - else: - assert State.NONE == ti.state - - def test_backfill_fill_blanks(self, dag_maker, mock_executor): - with dag_maker( - "test_backfill_fill_blanks", - ) as dag: - op1 = EmptyOperator(task_id="op1") - op2 = EmptyOperator(task_id="op2") - op3 = EmptyOperator(task_id="op3") - op4 = EmptyOperator(task_id="op4") - op5 = EmptyOperator(task_id="op5") - op6 = EmptyOperator(task_id="op6") - - dr = dag_maker.create_dagrun(state=None) - - session = settings.Session() - - tis = dr.get_task_instances() - for ti in tis: - if ti.task_id == op1.task_id: - ti.state = State.UP_FOR_RETRY - ti.end_date = DEFAULT_DATE - elif ti.task_id == op2.task_id: - ti.state = State.FAILED - elif ti.task_id == op3.task_id: - ti.state = State.SKIPPED - elif ti.task_id == op4.task_id: - ti.state = State.SCHEDULED - elif ti.task_id == op5.task_id: - ti.state = State.UPSTREAM_FAILED - # op6 = None - session.merge(ti) - session.commit() - session.close() - - job = Job() - job_runner = BackfillJobRunner(job=job, dag=dag, start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) - with pytest.raises(AirflowException, match="Some task instances failed"): - run_job(job=job, execute_callable=job_runner._execute) - - dr.refresh_from_db() - - assert dr.state == State.FAILED - - tis = dr.get_task_instances() - for ti in tis: - if ti.task_id in (op1.task_id, op4.task_id, op6.task_id): - assert ti.state == State.SUCCESS 
- elif ti.task_id == op2.task_id: - assert ti.state == State.FAILED - elif ti.task_id == op3.task_id: - assert ti.state == State.SKIPPED - elif ti.task_id == op5.task_id: - assert ti.state == State.UPSTREAM_FAILED - - def test_update_counters(self, dag_maker, session): - with dag_maker(dag_id="test_manage_executor_state", start_date=DEFAULT_DATE, session=session) as dag: - task1 = EmptyOperator(task_id="dummy", owner="airflow") - dr = dag_maker.create_dagrun(state=None) - job = Job() - job_runner = BackfillJobRunner(job=job, dag=dag) - ti = TI(task1, run_id=dr.run_id) - ti.refresh_from_db() - - ti_status = BackfillJobRunner._DagRunTaskStatus() - - # Test for success - # The in-memory task key in ti_status.running contains a try_number - # that is not in sync with the DB. To test that _update_counters method - # handles this, we mark the task as running in-memory and then increase - # the try number as it would be before the raw task is executed. - # When updating the counters the in-memory key will be used which will - # match what's in the in-memory ti_status.running map. This is the same - # for skipped, failed and retry states. - ti_status.running[ti.key] = ti # Task is queued and marked as running - ti.try_number += 1 - ti.set_state(State.SUCCESS, session) # Task finishes with success state - job_runner._update_counters(ti_status=ti_status, session=session) # Update counters - assert len(ti_status.running) == 0 - assert len(ti_status.succeeded) == 1 - assert len(ti_status.skipped) == 0 - assert len(ti_status.failed) == 0 - assert len(ti_status.to_run) == 0 - - ti_status.succeeded.clear() - - # Test for success when DB try_number is off from in-memory expectations - ti_status.running[ti.key] = ti - ti.try_number += 2 - ti.set_state(State.SUCCESS, session) - job_runner._update_counters(ti_status=ti_status, session=session) - assert len(ti_status.running) == 0 - assert len(ti_status.succeeded) == 1 - assert len(ti_status.skipped) == 0 - assert len(ti_status.failed) == 0 - assert len(ti_status.to_run) == 0 - - ti_status.succeeded.clear() - - # Test for skipped - ti_status.running[ti.key] = ti - ti.try_number += 1 - ti.set_state(State.SKIPPED, session) - job_runner._update_counters(ti_status=ti_status, session=session) - assert len(ti_status.running) == 0 - assert len(ti_status.succeeded) == 0 - assert len(ti_status.skipped) == 1 - assert len(ti_status.failed) == 0 - assert len(ti_status.to_run) == 0 - - ti_status.skipped.clear() - - # Test for failed - ti_status.running[ti.key] = ti - ti.try_number += 1 - ti.set_state(State.FAILED, session) - job_runner._update_counters(ti_status=ti_status, session=session) - assert len(ti_status.running) == 0 - assert len(ti_status.succeeded) == 0 - assert len(ti_status.skipped) == 0 - assert len(ti_status.failed) == 1 - assert len(ti_status.to_run) == 0 - - ti_status.failed.clear() - - # Test for retry - ti_status.running[ti.key] = ti - ti.try_number += 1 - ti.set_state(State.UP_FOR_RETRY, session) - job_runner._update_counters(ti_status=ti_status, session=session) - assert len(ti_status.running) == 0 - assert len(ti_status.succeeded) == 0 - assert len(ti_status.skipped) == 0 - assert len(ti_status.failed) == 0 - assert len(ti_status.to_run) == 1 - - ti_status.to_run.clear() - - # Test for reschedule - # Logic in taskinstance reduces the try number for a task that's been - # rescheduled (which makes sense because it's the _same_ try, but it's - # just being rescheduled to a later time). 
This now makes the in-memory - # and DB representation of the task try_number the _same_, which is unlike - # the above cases. But this is okay because the in-memory key is used. - ti_status.running[ti.key] = ti # Task queued and marked as running - ti.set_state(State.UP_FOR_RESCHEDULE, session) # Task finishes with reschedule state - job_runner._update_counters(ti_status=ti_status, session=session) - assert len(ti_status.running) == 0 - assert len(ti_status.succeeded) == 0 - assert len(ti_status.skipped) == 0 - assert len(ti_status.failed) == 0 - assert len(ti_status.to_run) == 1 - - ti_status.to_run.clear() - - # test for none - ti.set_state(State.NONE, session) - session.merge(ti) - session.commit() - ti_status.running[ti.key] = ti - job_runner._update_counters(ti_status=ti_status, session=session) - assert len(ti_status.running) == 0 - assert len(ti_status.succeeded) == 0 - assert len(ti_status.skipped) == 0 - assert len(ti_status.failed) == 0 - assert len(ti_status.to_run) == 1 - - ti_status.to_run.clear() - - # test for scheduled - ti.set_state(State.SCHEDULED) - # Deferred tasks are put into scheduled by the triggerer - # Check that they are put into to_run - ti_status.running[ti.key] = ti - job_runner._update_counters(ti_status=ti_status, session=session) - assert len(ti_status.running) == 0 - assert len(ti_status.succeeded) == 0 - assert len(ti_status.skipped) == 0 - assert len(ti_status.failed) == 0 - assert len(ti_status.to_run) == 1 - - ti_status.to_run.clear() - # test for deferred - # if a task is deferred and it's not yet time for the triggerer - # to reschedule it, we should leave it in ti_status.running - ti.set_state(State.DEFERRED) - ti_status.running[ti.key] = ti - job_runner._update_counters(ti_status=ti_status, session=session) - assert len(ti_status.running) == 1 - assert len(ti_status.succeeded) == 0 - assert len(ti_status.skipped) == 0 - assert len(ti_status.failed) == 0 - assert len(ti_status.to_run) == 0 - session.close() - - def test_dag_dagrun_infos_between(self, dag_maker): - with dag_maker( - dag_id="dagrun_infos_between", start_date=DEFAULT_DATE, schedule="@hourly" - ) as test_dag: - EmptyOperator( - task_id="dummy", - owner="airflow", - ) - - assert [DEFAULT_DATE] == [ - info.logical_date - for info in test_dag.iter_dagrun_infos_between( - earliest=DEFAULT_DATE, - latest=DEFAULT_DATE, - ) - ] - assert [ - DEFAULT_DATE - datetime.timedelta(hours=3), - DEFAULT_DATE - datetime.timedelta(hours=2), - DEFAULT_DATE - datetime.timedelta(hours=1), - DEFAULT_DATE, - ] == [ - info.logical_date - for info in test_dag.iter_dagrun_infos_between( - earliest=DEFAULT_DATE - datetime.timedelta(hours=3), - latest=DEFAULT_DATE, - ) - ] - - def test_backfill_run_backwards(self, mock_executor): - dag = self.dagbag.get_dag("test_start_date_scheduling") - dag.clear() - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=1), - run_backwards=True, - ) - run_job(job=job, execute_callable=job_runner._execute) - - session = settings.Session() - tis = ( - session.query(TI) - .join(TI.dag_run) - .filter(TI.dag_id == "test_start_date_scheduling" and TI.task_id == "dummy") - .order_by(DagRun.execution_date) - .all() - ) - - queued_times = [ti.queued_dttm for ti in tis] - assert queued_times == sorted(queued_times, reverse=True) - assert all(ti.state == State.SUCCESS for ti in tis) - - dag.clear() - session.close() - - def test_reset_orphaned_tasks_with_orphans(self, dag_maker): - """Create 
dagruns and ensure only ones with correct states are reset.""" - prefix = "backfill_job_test_test_reset_orphaned_tasks" - states = [State.QUEUED, State.SCHEDULED, State.NONE, State.RUNNING, State.SUCCESS] - states_to_reset = [State.QUEUED, State.SCHEDULED, State.NONE] - triggered_by_kwargs = {"triggered_by": DagRunTriggeredByType.TEST} if AIRFLOW_V_3_0_PLUS else {} - - tasks = [] - with dag_maker(dag_id=prefix) as dag: - for i in range(len(states)): - task_id = f"{prefix}_task_{i}" - task = EmptyOperator(task_id=task_id) - tasks.append(task) - - session = settings.Session() - job = Job() - job_runner = BackfillJobRunner(job=job, dag=dag) - # create dagruns - dr1 = dag_maker.create_dagrun(run_id=DEFAULT_DAG_RUN_ID, state=State.RUNNING) - dr2 = dag.create_dagrun(run_id="test2", state=State.SUCCESS, **triggered_by_kwargs) - - # create taskinstances and set states - dr1_tis = [] - dr2_tis = [] - for task, state in zip(tasks, states): - ti1 = TI(task, run_id=dr1.run_id) - ti2 = TI(task, run_id=dr2.run_id) - ti1.refresh_from_db() - ti2.refresh_from_db() - ti1.state = state - ti2.state = state - dr1_tis.append(ti1) - dr2_tis.append(ti2) - session.merge(ti1) - session.merge(ti2) - session.commit() - - assert 2 == job_runner.reset_state_for_orphaned_tasks() - - for ti in dr1_tis + dr2_tis: - ti.refresh_from_db() - - # running dagrun should be reset - for state, ti in zip(states, dr1_tis): - if state in states_to_reset: - assert ti.state is None - else: - assert state == ti.state - - # otherwise not - for state, ti in zip(states, dr2_tis): - assert state == ti.state - - for state, ti in zip(states, dr1_tis): - ti.state = state - session.commit() - - job_runner.reset_state_for_orphaned_tasks(filter_by_dag_run=dr1, session=session) - - # check same for dag_run version - for state, ti in zip(states, dr2_tis): - assert state == ti.state - - def test_reset_orphaned_tasks_specified_dagrun(self, session, dag_maker): - """Try to reset when we specify a dagrun and ensure nothing else is.""" - dag_id = "test_reset_orphaned_tasks_specified_dagrun" - task_id = dag_id + "_task" - with dag_maker( - dag_id=dag_id, - start_date=DEFAULT_DATE, - schedule="@daily", - session=session, - ) as dag: - EmptyOperator(task_id=task_id, dag=dag) - - job = Job() - job_runner = BackfillJobRunner(job=job, dag=dag) - # make two dagruns, only reset for one - triggered_by_kwargs = {"triggered_by": DagRunTriggeredByType.TEST} if AIRFLOW_V_3_0_PLUS else {} - dr1 = dag_maker.create_dagrun(state=State.SUCCESS, **triggered_by_kwargs) - dr2 = dag.create_dagrun( - run_id="test2", - state=State.RUNNING, - session=session, - **triggered_by_kwargs, - ) - ti1 = dr1.get_task_instances(session=session)[0] - ti2 = dr2.get_task_instances(session=session)[0] - ti1.state = State.SCHEDULED - ti2.state = State.SCHEDULED - - session.merge(ti1) - session.merge(ti2) - session.merge(dr1) - session.merge(dr2) - session.flush() - - num_reset_tis = job_runner.reset_state_for_orphaned_tasks(filter_by_dag_run=dr2, session=session) - assert 1 == num_reset_tis - ti1.refresh_from_db(session=session) - ti2.refresh_from_db(session=session) - assert State.SCHEDULED == ti1.state - assert State.NONE == ti2.state - - def test_job_id_is_assigned_to_dag_run(self, dag_maker, mock_executor): - dag_id = "test_job_id_is_assigned_to_dag_run" - with dag_maker(dag_id=dag_id, start_date=DEFAULT_DATE, schedule="@daily") as dag: - EmptyOperator(task_id="dummy_task", dag=dag) - - job = Job() - job_runner = BackfillJobRunner( - job=job, dag=dag, start_date=timezone.utcnow() - 
datetime.timedelta(days=1) - ) - run_job(job=job, execute_callable=job_runner._execute) - dr: DagRun = dag.get_last_dagrun() - assert dr.creating_job_id == job.id - - def test_executor_lifecycle(self, dag_maker, mock_executors): - """Ensure that all executors go through the full lifecycle of start, heartbeat, end, etc""" - dag_id = "test_executor_lifecycle" - with dag_maker(dag_id=dag_id, start_date=DEFAULT_DATE, schedule="@daily") as dag: - EmptyOperator(task_id="dummy_task", dag=dag) - - job = Job() - job_runner = BackfillJobRunner( - job=job, dag=dag, start_date=timezone.utcnow() - datetime.timedelta(days=1) - ) - run_job(job=job, execute_callable=job_runner._execute) - - for executor_mock in mock_executors: - assert executor_mock.job_id == job.id - executor_mock.start.assert_called_once() - executor_mock.heartbeat.assert_called_once() - executor_mock.end.assert_called_once() - - def test_non_existing_executor(self, dag_maker, mock_executors): - dag_id = "test_non_existing_executor" - with dag_maker(dag_id=dag_id, start_date=DEFAULT_DATE, schedule="@daily") as dag: - EmptyOperator(task_id="dummy_task", dag=dag, executor="foobar") - - job = Job() - job_runner = BackfillJobRunner( - job=job, dag=dag, start_date=timezone.utcnow() - datetime.timedelta(days=1) - ) - # Executor "foobar" does not exist, so the Backfill job should fail to run those tasks and - # throw an UnknownExecutorException - with pytest.raises(UnknownExecutorException): - run_job(job=job, execute_callable=job_runner._execute) - - def test_hybrid_executors(self, dag_maker, mock_executors, session): - dag_id = "test_hybrid_executors" - with dag_maker(dag_id=dag_id, start_date=DEFAULT_DATE, schedule="@daily") as dag: - EmptyOperator(task_id="default_exec", dag=dag) - EmptyOperator(task_id="default_exec_explicit", dag=dag, executor=mock_executors[0].name.alias) - EmptyOperator(task_id="secondary_exec", dag=dag, executor=mock_executors[1].name.alias) - - job = Job() - job_runner = BackfillJobRunner( - job=job, dag=dag, start_date=timezone.utcnow() - datetime.timedelta(days=1) - ) - - with mock.patch("airflow.executors.executor_loader.ExecutorLoader.lookup_executor_name_by_str"): - run_job(job=job, execute_callable=job_runner._execute) - - dr = DagRun.find(dag_id=dag.dag_id, session=session)[0] - - call_list = mock_executors[0].queue_task_instance.call_args_list - assert len(call_list) == 2 - assert call_list[0].args[0].task_id == "default_exec" - assert call_list[1].args[0].task_id == "default_exec_explicit" - - call_list = mock_executors[1].queue_task_instance.call_args_list - assert len(call_list) == 1 - assert call_list[0].args[0].task_id == "secondary_exec" - - assert dr - assert dr.state == DagRunState.SUCCESS - - # Check that every task has a start and end date - for ti in dr.task_instances: - assert ti.state == TaskInstanceState.SUCCESS - assert ti.start_date is not None - assert ti.end_date is not None - - def test_backfill_has_job_id_int(self, mock_executor): - """Make sure that backfill jobs are assigned job_ids and that the job_id is an int.""" - dag = self.dagbag.get_dag("test_start_date_scheduling") - dag.clear() - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE + datetime.timedelta(days=1), - run_backwards=True, - ) - run_job(job=job, execute_callable=job_runner._execute) - assert isinstance(job.executor.job_id, int) - - @pytest.mark.long_running - @pytest.mark.parametrize("executor", [SequentialExecutor, DebugExecutor]) - 
@pytest.mark.parametrize("dag_id", ["test_mapped_classic", "test_mapped_taskflow", "test_sensor"]) - def test_backfilling_dags(self, dag_id, executor, session): - """ - End-to-end test for backfilling dags with various executors. - - We test with multiple executors as they have different "execution environments" -- for instance - DebugExecutor runs a lot more in the same process than other Executors. - - """ - # This test needs a real executor to run, so that the `make_list` task can write out the TaskMap - for _ in _mock_executor(executor): - self.dagbag.process_file(str(TEST_DAGS_FOLDER / f"{dag_id}.py")) - dag = self.dagbag.get_dag(dag_id) - - when = timezone.datetime(2022, 1, 1) - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=when, - end_date=when, - donot_pickle=True, - ) - run_job(job=job, execute_callable=job_runner._execute) - - dr = DagRun.find(dag_id=dag.dag_id, execution_date=when, session=session)[0] - assert dr - assert dr.state == DagRunState.SUCCESS - - # Check that every task has a start and end date - for ti in dr.task_instances: - assert ti.state == TaskInstanceState.SUCCESS - assert ti.start_date is not None - assert ti.end_date is not None - - def test_mapped_dag_pre_existing_tis(self, dag_maker, session, mock_executor): - """If the DagRun already has some mapped TIs, ensure that we re-run them successfully""" - from airflow.decorators import task - from airflow.operators.python import PythonOperator - - list_result = [[1], [2], [{"a": "b"}]] - - @task - def make_arg_lists(): - return list_result - - def consumer(value): - print(repr(value)) - - with dag_maker(session=session) as dag: - consumer_op = PythonOperator.partial(task_id="consumer", python_callable=consumer).expand( - op_args=make_arg_lists() - ) - PythonOperator.partial(task_id="consumer_literal", python_callable=consumer).expand( - op_args=[[1], [2], [3]], - ) - - dr = dag_maker.create_dagrun() - - # Create the existing mapped TIs -- this the crucial part of this test - ti = dr.get_task_instance("consumer", session=session) - ti.map_index = 0 - for map_index in range(1, 3): - ti = TI(consumer_op, run_id=dr.run_id, map_index=map_index) - session.add(ti) - ti.dag_run = dr - session.flush() - - executor = mock_executor - - ti_status = BackfillJobRunner._DagRunTaskStatus() - ti_status.active_runs.add(dr) - ti_status.to_run = {ti.key: ti for ti in dr.task_instances} - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=dr.execution_date, - end_date=dr.execution_date, - donot_pickle=True, - ) - - executor_change_state = executor.change_state - - def on_change_state(key, state, info=None): - if key.task_id == "make_arg_lists": - session.add( - TaskMap( - length=len(list_result), - keys=None, - dag_id=key.dag_id, - run_id=key.run_id, - task_id=key.task_id, - map_index=key.map_index, - ) - ) - session.flush() - executor_change_state(key, state, info) - - with patch.object(executor, "change_state", side_effect=on_change_state): - job_runner._process_backfill_task_instances( - ti_status=ti_status, - start_date=dr.execution_date, - pickle_id=None, - session=session, - ) - assert ti_status.failed == set() - assert ti_status.succeeded == { - TaskInstanceKey(dag_id=dr.dag_id, task_id="consumer", run_id="test", try_number=0, map_index=0), - TaskInstanceKey(dag_id=dr.dag_id, task_id="consumer", run_id="test", try_number=0, map_index=1), - TaskInstanceKey(dag_id=dr.dag_id, task_id="consumer", run_id="test", try_number=0, map_index=2), - TaskInstanceKey( - 
dag_id=dr.dag_id, task_id="consumer_literal", run_id="test", try_number=0, map_index=0 - ), - TaskInstanceKey( - dag_id=dr.dag_id, task_id="consumer_literal", run_id="test", try_number=0, map_index=1 - ), - TaskInstanceKey( - dag_id=dr.dag_id, task_id="consumer_literal", run_id="test", try_number=0, map_index=2 - ), - TaskInstanceKey( - dag_id=dr.dag_id, task_id="make_arg_lists", run_id="test", try_number=0, map_index=-1 - ), - } - - def test_mapped_dag_unexpandable(self, dag_maker, session, mock_executor): - with dag_maker(session=session) as dag: - - @dag.task - def get_things(): - return [1, 2] - - @dag.task - def this_fails() -> None: - raise RuntimeError("sorry!") - - @dag.task(trigger_rule=TriggerRule.ALL_DONE) - def consumer(a, b): - print(a, b) - - consumer.expand(a=get_things(), b=this_fails()) - - when = timezone.datetime(2022, 1, 1) - job = Job() - job_runner = BackfillJobRunner(job=job, dag=dag, start_date=when, end_date=when, donot_pickle=True) - run_job(job=job, execute_callable=job_runner._execute) - (dr,) = DagRun.find(dag_id=dag.dag_id, execution_date=when, session=session) - assert dr.state == DagRunState.FAILED - - # Check that every task has a start and end date - tis = {(ti.task_id, ti.map_index): ti for ti in dr.task_instances} - assert len(tis) == 3 - tis[("get_things", -1)].state == TaskInstanceState.SUCCESS - tis[("this_fails", -1)].state == TaskInstanceState.FAILED - tis[("consumer", -1)].state == TaskInstanceState.UPSTREAM_FAILED - - def test_start_date_set_for_resetted_dagruns(self, dag_maker, session, caplog, mock_executor): - with dag_maker() as dag: - EmptyOperator(task_id="task1") - - dr = dag_maker.create_dagrun() - dr.state = State.SUCCESS - session.merge(dr) - session.flush() - dag.clear() - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE, - donot_pickle=True, - ) - run_job(job=job, execute_callable=job_runner._execute) - - (dr,) = DagRun.find(dag_id=dag.dag_id, execution_date=DEFAULT_DATE, session=session) - assert dr.start_date - assert f"Failed to record duration of {dr}" not in caplog.text - - def test_task_instances_are_not_set_to_scheduled_when_dagrun_reset( - self, dag_maker, session, mock_executor - ): - """Test that when dagrun is reset, task instances are not set to scheduled""" - - with dag_maker() as dag: - task1 = EmptyOperator(task_id="task1") - task2 = EmptyOperator(task_id="task2") - task3 = EmptyOperator(task_id="task3") - task1 >> task2 >> task3 - - for i in range(1, 4): - dag_maker.create_dagrun( - run_id=f"test_dagrun_{i}", execution_date=DEFAULT_DATE + datetime.timedelta(days=i) - ) - - dag.clear() - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE + datetime.timedelta(days=1), - end_date=DEFAULT_DATE + datetime.timedelta(days=4), - donot_pickle=True, - ) - for dr in DagRun.find(dag_id=dag.dag_id, session=session): - tasks_to_run = job_runner._task_instances_for_dag_run(dag, dr, session=session) - states = [ti.state for _, ti in tasks_to_run.items()] - assert TaskInstanceState.SCHEDULED in states - assert State.NONE in states - - @pytest.mark.parametrize( - ["disable_retry", "try_number", "exception"], - ( - (True, 1, BackfillUnfinished), - (False, 2, AirflowException), - ), - ) - def test_backfill_disable_retry(self, dag_maker, disable_retry, try_number, exception, mock_executor): - with dag_maker( - dag_id="test_disable_retry", - schedule="@daily", - default_args={ - "retries": 2, - "retry_delay": 
datetime.timedelta(seconds=3), - }, - ) as dag: - task1 = EmptyOperator(task_id="task1") - dag_run = dag_maker.create_dagrun(state=None) - - executor = mock_executor - executor.parallelism = 16 - executor.mock_task_results[ - TaskInstanceKey(dag.dag_id, task1.task_id, dag_run.run_id, try_number=1) - ] = TaskInstanceState.UP_FOR_RETRY - executor.mock_task_results[ - TaskInstanceKey(dag.dag_id, task1.task_id, dag_run.run_id, try_number=2) - ] = TaskInstanceState.FAILED - - job = Job() - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE, - disable_retry=disable_retry, - ) - with pytest.raises(exception): - run_job(job=job, execute_callable=job_runner._execute) - ti = dag_run.get_task_instance(task_id=task1.task_id) - - assert ti.try_number == try_number - - dag_run.refresh_from_db() - - assert dag_run.state == DagRunState.FAILED - - dag.clear() - - # Quarantined issue tracked in https://github.com/apache/airflow/issues/39858 - @pytest.mark.quarantined - def test_backfill_failed_dag_with_upstream_failed_task(self, dag_maker): - self.dagbag.process_file(str(TEST_DAGS_FOLDER / "test_backfill_with_upstream_failed_task.py")) - dag = self.dagbag.get_dag("test_backfill_with_upstream_failed_task") - - # We have to use the "fake" version of perform_heartbeat due to the 'is_unit_test' check in - # the original one. However, instead of using the original version of perform_heartbeat, - # we can simply wait for a LocalExecutor's worker cycle. The approach with sleep works well now, - # but it can be replaced with checking the state of the LocalTaskJob. - def fake_perform_heartbeat(*args, **kwargs): - import time - - time.sleep(1) - - with mock.patch("airflow.jobs.backfill_job_runner.perform_heartbeat", fake_perform_heartbeat): - job = Job(executor=ExecutorLoader.load_executor("LocalExecutor")) - job_runner = BackfillJobRunner( - job=job, - dag=dag, - start_date=DEFAULT_DATE, - end_date=DEFAULT_DATE, - rerun_failed_tasks=True, - ) - with pytest.raises(BackfillUnfinished): - run_job(job=job, execute_callable=job_runner._execute) - - dr: DagRun = dag.get_last_dagrun() - assert dr.state == State.FAILED diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py index 78c3acdce0de6..18ca7c63320f0 100644 --- a/tests/jobs/test_scheduler_job.py +++ b/tests/jobs/test_scheduler_job.py @@ -48,7 +48,6 @@ from airflow.executors.base_executor import BaseExecutor from airflow.executors.executor_constants import MOCK_EXECUTOR from airflow.executors.executor_loader import ExecutorLoader -from airflow.jobs.backfill_job_runner import BackfillJobRunner from airflow.jobs.job import Job, run_job from airflow.jobs.local_task_job_runner import LocalTaskJobRunner from airflow.jobs.scheduler_job_runner import SchedulerJobRunner @@ -60,16 +59,16 @@ from airflow.models.db_callback_request import DbCallbackRequest from airflow.models.pool import Pool from airflow.models.serialized_dag import SerializedDagModel -from airflow.models.taskinstance import SimpleTaskInstance, TaskInstance, TaskInstanceKey +from airflow.models.taskinstance import SimpleTaskInstance, TaskInstance from airflow.operators.empty import EmptyOperator from airflow.providers.standard.operators.bash import BashOperator from airflow.serialization.serialized_objects import SerializedDAG +from airflow.timetables.base import DataInterval from airflow.utils import timezone from airflow.utils.file import list_py_file_paths from airflow.utils.session import create_session, provide_session from 
airflow.utils.state import DagRunState, JobState, State, TaskInstanceState from airflow.utils.types import DagRunType -from tests.jobs.test_backfill_job import _mock_executor from tests.listeners import dag_listener from tests.listeners.test_listeners import get_listener_manager from tests.models import TEST_DAGS_FOLDER @@ -2916,22 +2915,16 @@ def test_scheduler_start_date(self, configs): # because it would take the most recent run and start from there # That behavior still exists, but now it will only do so if after the # start date - bf_exec = MockExecutor() - for _ in _mock_executor(bf_exec): - backfill_job = Job() - job_runner = BackfillJobRunner( - job=backfill_job, dag=dag, start_date=DEFAULT_DATE, end_date=DEFAULT_DATE - ) - run_job(job=backfill_job, execute_callable=job_runner._execute) - - # one task ran + dag.create_dagrun( + state="success", + triggered_by=DagRunTriggeredByType.TIMETABLE, + run_id="abc123", + execution_date=DEFAULT_DATE, + run_type=DagRunType.BACKFILL_JOB, + data_interval=DataInterval(DEFAULT_DATE, DEFAULT_DATE + timedelta(days=1)), + ) + # one task "ran" assert len(session.query(TaskInstance).filter(TaskInstance.dag_id == dag_id).all()) == 1 - assert [ - ( - TaskInstanceKey(dag.dag_id, "dummy", f"backfill__{DEFAULT_DATE.isoformat()}", 1), - (State.SUCCESS, None), - ), - ] == bf_exec.sorted_tasks session.commit() scheduler_job = Job( From 7ae3a5519446c6e3e090ec44036883764a3dffc5 Mon Sep 17 00:00:00 2001 From: olegkachur-e Date: Mon, 14 Oct 2024 03:49:35 +0200 Subject: [PATCH 104/125] Add early job_id xcom_push for google provider Beam Pipeline operators (#42982) - To let GCP Beam Sensor operators 'sense' the pipeline changes, by having the dataflow job_id xcom_pushed as soon as it is available. Related issue: https://github.com/apache/airflow/issues/30007. Co-authored-by: Oleg Kachur --- .../providers/apache/beam/operators/beam.py | 19 +++++++++++++++-- .../tests/apache/beam/operators/test_beam.py | 21 +++++++++++++++++++ 2 files changed, 38 insertions(+), 2 deletions(-) diff --git a/providers/src/airflow/providers/apache/beam/operators/beam.py b/providers/src/airflow/providers/apache/beam/operators/beam.py index 41c55ede2a5bc..65f23336589d2 100644 --- a/providers/src/airflow/providers/apache/beam/operators/beam.py +++ b/providers/src/airflow/providers/apache/beam/operators/beam.py @@ -187,7 +187,20 @@ def __init__( self.gcp_conn_id = gcp_conn_id self.beam_hook: BeamHook self.dataflow_hook: DataflowHook | None = None - self.dataflow_job_id: str | None = None + self._dataflow_job_id: str | None = None + self._execute_context: Context | None = None + + @property + def dataflow_job_id(self): + return self._dataflow_job_id + + @dataflow_job_id.setter + def dataflow_job_id(self, new_value): + if all([new_value, not self._dataflow_job_id, self._execute_context]): + # push job_id as soon as it's ready, to let Sensors work before the job finishes + # and before the job_id is pushed as part of the returned value. 
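+ # The all(...) guard makes this a one-time push: only the first non-None job_id assignment fires it, and only once execute() has stored the task context.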
+ self.xcom_push(context=self._execute_context, key="dataflow_job_id", value=new_value) + self._dataflow_job_id = new_value def _cast_dataflow_config(self): if isinstance(self.dataflow_config, dict): @@ -346,6 +359,7 @@ def __init__( def execute(self, context: Context): """Execute the Apache Beam Python Pipeline.""" + self._execute_context = context self._cast_dataflow_config() self.pipeline_options.setdefault("labels", {}).update( {"airflow-version": "v" + version.replace(".", "-").replace("+", "-")} @@ -540,6 +554,7 @@ def __init__( def execute(self, context: Context): """Execute the Apache Beam Python Pipeline.""" + self._execute_context = context self._cast_dataflow_config() ( self.is_dataflow, @@ -738,7 +753,7 @@ def execute(self, context: Context): """Execute the Apache Beam Pipeline.""" if not exactly_one(self.go_file, self.launcher_binary): raise ValueError("Exactly one of `go_file` and `launcher_binary` must be set") - + self._execute_context = context self._cast_dataflow_config() if self.dataflow_config.impersonation_chain: self.log.warning( diff --git a/providers/tests/apache/beam/operators/test_beam.py b/providers/tests/apache/beam/operators/test_beam.py index 6d1b4b5d1b958..fd2e706c29414 100644 --- a/providers/tests/apache/beam/operators/test_beam.py +++ b/providers/tests/apache/beam/operators/test_beam.py @@ -110,6 +110,27 @@ def test_async_execute_logging_should_execute_successfully(self, caplog): ) assert f"{TASK_ID} completed with response Pipeline has finished SUCCESSFULLY" in caplog.text + def test_early_dataflow_id_xcom_push(self, default_options, pipeline_options): + with mock.patch.object(BeamBasePipelineOperator, "xcom_push") as mock_xcom_push: + op = BeamBasePipelineOperator( + **self.default_op_kwargs, + default_pipeline_options=copy.deepcopy(default_options), + pipeline_options=copy.deepcopy(pipeline_options), + dataflow_config={}, + ) + sample_df_job_id = "sample_df_job_id_value" + op._execute_context = MagicMock() + + assert op.dataflow_job_id is None + + op.dataflow_job_id = sample_df_job_id + mock_xcom_push.assert_called_once_with( + context=op._execute_context, key="dataflow_job_id", value=sample_df_job_id + ) + mock_xcom_push.reset_mock() + op.dataflow_job_id = "sample_df_job_same_value_id" + mock_xcom_push.assert_not_called() + class TestBeamRunPythonPipelineOperator: @pytest.fixture(autouse=True) From 72da8f6b6082ad78faf8706705bad3d01d13b2fc Mon Sep 17 00:00:00 2001 From: Daniel Standish <15932138+dstandish@users.noreply.github.com> Date: Sun, 13 Oct 2024 19:40:48 -0700 Subject: [PATCH 105/125] Add logic to mark backfills as complete (#42683) Periodically check for backfills that should be marked as complete. 
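The new helper loads each open backfill that has no running or queued dag runs and stamps completed_at in Python; the inline todo notes this could later collapse into a single UPDATE. A rough sketch of that follow-up, using only names already present in this patch (an illustration only, not part of the change):

    from sqlalchemy import and_, exists, select, update

    unfinished = (DagRunState.RUNNING, DagRunState.QUEUED)
    session.execute(
        update(Backfill)
        .where(
            Backfill.completed_at.is_(None),
            ~exists(
                select(DagRun.id).where(
                    and_(DagRun.backfill_id == Backfill.id, DagRun.state.in_(unfinished))
                )
            ),
        )
        .values(completed_at=timezone.utcnow())
    )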
--- airflow/jobs/scheduler_job_runner.py | 30 +++++++++++++++++++++++++++- tests/jobs/test_scheduler_job.py | 29 +++++++++++++++++++++++++++ 2 files changed, 58 insertions(+), 1 deletion(-) diff --git a/airflow/jobs/scheduler_job_runner.py b/airflow/jobs/scheduler_job_runner.py index 2999ed391bc94..f6821f57aa83c 100644 --- a/airflow/jobs/scheduler_job_runner.py +++ b/airflow/jobs/scheduler_job_runner.py @@ -30,7 +30,7 @@ from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, Collection, Iterable, Iterator -from sqlalchemy import and_, delete, func, not_, or_, select, text, update +from sqlalchemy import and_, delete, exists, func, not_, or_, select, text, update from sqlalchemy.exc import OperationalError from sqlalchemy.orm import lazyload, load_only, make_transient, selectinload from sqlalchemy.sql import expression @@ -51,6 +51,7 @@ DagScheduleAssetReference, TaskOutletAssetReference, ) +from airflow.models.backfill import Backfill from airflow.models.dag import DAG, DagModel from airflow.models.dagbag import DagBag from airflow.models.dagrun import DagRun @@ -1063,6 +1064,11 @@ def _run_scheduler_loop(self) -> None: self.check_trigger_timeouts, ) + timers.call_regular_interval( + 30, + self._mark_backfills_complete, + ) + timers.call_regular_interval( conf.getfloat("scheduler", "pool_metrics_interval", fallback=5.0), self._emit_pool_metrics, @@ -1288,6 +1294,28 @@ def _create_dagruns_for_dags(self, guard: CommitProhibitorGuard, session: Sessio guard.commit() # END: create dagruns + @provide_session + def _mark_backfills_complete(self, session: Session = NEW_SESSION) -> None: + """Mark backfills whose dag runs have all finished as completed.""" + self.log.debug("checking for completed backfills.") + unfinished_states = (DagRunState.RUNNING, DagRunState.QUEUED) + now = timezone.utcnow() + # todo: AIP-78 simplify this function to an update statement + query = select(Backfill).where( + Backfill.completed_at.is_(None), + ~exists( + select(DagRun.id).where( + and_(DagRun.backfill_id == Backfill.id, DagRun.state.in_(unfinished_states)) + ) + ), + ) + backfills = session.scalars(query).all() + if not backfills: + return + self.log.info("marking %s backfills as complete", len(backfills)) + for b in backfills: + b.completed_at = now + @add_span def _create_dag_runs(self, dag_models: Collection[DagModel], session: Session) -> None: """Create a DAG run and update the dag_model to control if/when the next DAGRun should be created.""" diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py index 18ca7c63320f0..7eae1639d0e1b 100644 --- a/tests/jobs/test_scheduler_job.py +++ b/tests/jobs/test_scheduler_job.py @@ -6399,3 +6399,32 @@ def test_process_dags_queries_count( prefix = "Collected database query count mismatches:" joined = "\n\n".join(failures) raise AssertionError(f"{prefix}\n\n{joined}") + + +def test_mark_backfills_completed(dag_maker, session): + clear_db_backfills() + with dag_maker(serialized=True, dag_id="test_mark_backfills_completed", schedule="@daily") as dag: + BashOperator(task_id="hi", bash_command="echo hi") + b = _create_backfill( + dag_id=dag.dag_id, + from_date=pendulum.parse("2021-01-01"), + to_date=pendulum.parse("2021-01-03"), + max_active_runs=10, + reverse=False, + dag_run_conf={}, + ) + session.expunge_all() + runner = SchedulerJobRunner( + job=Job(job_type=SchedulerJobRunner.job_type, executor=MockExecutor(do_update=False)) + ) + runner._mark_backfills_complete() + b = session.get(Backfill, b.id) + assert b.completed_at is None + 
session.expunge_all() + drs = session.scalars(select(DagRun).where(DagRun.dag_id == dag.dag_id)) + for dr in drs: + dr.state = DagRunState.SUCCESS + session.commit() + runner._mark_backfills_complete() + b = session.get(Backfill, b.id) + assert b.completed_at.timestamp() > 0 From baa87b600b6796818d1ea3e4da5f98f19834be56 Mon Sep 17 00:00:00 2001 From: Kalyan R Date: Mon, 14 Oct 2024 14:46:59 +0530 Subject: [PATCH 106/125] fix mypy check failure on main (#42976) * fix mypy error * Apply suggestions from code review * Update conftest.py Co-authored-by: Jarek Potiuk --------- Co-authored-by: Kaxil Naik Co-authored-by: Jarek Potiuk --- task_sdk/tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/task_sdk/tests/conftest.py b/task_sdk/tests/conftest.py index 209fad46d7d62..7839b299eef2c 100644 --- a/task_sdk/tests/conftest.py +++ b/task_sdk/tests/conftest.py @@ -23,4 +23,4 @@ @pytest.hookimpl(tryfirst=True) def pytest_configure(config: pytest.Config) -> None: - config.inicfg["airflow_deprecations_ignore"] = () + config.inicfg["airflow_deprecations_ignore"] = [] From c5e4a74cfc9c828f845cbe83239464669efa90d5 Mon Sep 17 00:00:00 2001 From: GPK Date: Mon, 14 Oct 2024 10:30:37 +0100 Subject: [PATCH 107/125] mark test_setup_constraint_mapped_task_upstream_removed_and_success as flaky (#42997) --- tests/ti_deps/deps/test_trigger_rule_dep.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/ti_deps/deps/test_trigger_rule_dep.py b/tests/ti_deps/deps/test_trigger_rule_dep.py index 80cb400a46d77..9f3699d09c33d 100644 --- a/tests/ti_deps/deps/test_trigger_rule_dep.py +++ b/tests/ti_deps/deps/test_trigger_rule_dep.py @@ -1494,6 +1494,7 @@ def w2(): assert self.get_ti(dr, "w2").state == expected +@pytest.mark.flaky(reruns=5) @pytest.mark.parametrize( "map_index, flag_upstream_failed, expected_ti_state", [(2, True, None), (3, True, REMOVED), (4, True, REMOVED), (3, False, None)], From 85c2b7d722d0932c7568caaf371f22317d34f671 Mon Sep 17 00:00:00 2001 From: Elad Kalif <45845474+eladkal@users.noreply.github.com> Date: Mon, 14 Oct 2024 21:12:31 +0700 Subject: [PATCH 108/125] Update providers metadata 2024-10-14 (#42995) --- generated/provider_metadata.json | 358 ++++++++++++++++++++----------- 1 file changed, 227 insertions(+), 131 deletions(-) diff --git a/generated/provider_metadata.json b/generated/provider_metadata.json index 56199e2f82c3d..a692eb72ae9e5 100644 --- a/generated/provider_metadata.json +++ b/generated/provider_metadata.json @@ -1,11 +1,11 @@ { "airbyte": { "1.0.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-04-06T23:45:20Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.1.0": { @@ -205,15 +205,15 @@ "date_released": "2021-02-27T14:47:19Z" }, "1.3.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.0.0", "date_released": "2021-04-06T23:45:20Z" }, "1.4.0": { - "associated_airflow_version": "2.1.0", + "associated_airflow_version": "2.1.1", "date_released": "2021-05-01T09:04:42Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.1.0": { @@ -447,23 +447,27 @@ "8.29.0": { "associated_airflow_version": "2.10.1", "date_released": "2024-09-24T13:49:56Z" + }, + "9.0.0": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:52Z" } }, "apache.beam": { "1.0.0": { - 
"associated_airflow_version": "2.0.1", + "associated_airflow_version": "2.1.0", "date_released": "2021-02-04T09:11:00Z" }, "1.0.1": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.0", "date_released": "2021-03-08T19:27:03Z" }, "2.0.0": { - "associated_airflow_version": "2.1.0", + "associated_airflow_version": "2.1.1", "date_released": "2021-05-01T09:04:42Z" }, "3.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "3.0.1": { @@ -581,6 +585,10 @@ "5.8.0": { "associated_airflow_version": "2.10.1", "date_released": "2024-08-22T10:37:58Z" + }, + "5.8.1": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:52Z" } }, "apache.cassandra": { @@ -593,7 +601,7 @@ "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -777,11 +785,11 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.1.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-03-08T19:27:03Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -957,7 +965,7 @@ "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.1.0": { @@ -1071,39 +1079,39 @@ "date_released": "2021-02-27T14:47:19Z" }, "1.0.3": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.0.0", "date_released": "2021-04-06T23:45:20Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { - "associated_airflow_version": "2.1.3", + "associated_airflow_version": "2.1.2", "date_released": "2021-07-26T20:06:28Z" }, "2.0.2": { - "associated_airflow_version": "2.1.4", + "associated_airflow_version": "2.1.2", "date_released": "2021-08-30T21:27:32Z" }, "2.0.3": { - "associated_airflow_version": "2.2.2", + "associated_airflow_version": "2.1.2", "date_released": "2021-10-29T22:33:08Z" }, "2.1.0": { - "associated_airflow_version": "2.2.3", + "associated_airflow_version": "2.1.2", "date_released": "2021-11-30T10:33:58Z" }, "2.2.0": { - "associated_airflow_version": "2.2.4", + "associated_airflow_version": "2.1.2", "date_released": "2022-02-08T21:57:49Z" }, "2.3.0": { - "associated_airflow_version": "2.2.4", + "associated_airflow_version": "2.1.2", "date_released": "2022-03-07T20:27:47Z" }, "2.3.1": { - "associated_airflow_version": "2.2.4", + "associated_airflow_version": "2.1.2", "date_released": "2022-03-14T22:17:57Z" }, "2.3.2": { @@ -1347,6 +1355,10 @@ "1.6.0": { "associated_airflow_version": "2.10.1", "date_released": "2024-08-22T10:37:58Z" + }, + "1.6.1": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:52Z" } }, "apache.kylin": { @@ -1359,7 +1371,7 @@ "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -1433,11 +1445,11 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.1.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-04-06T23:45:20Z" }, "2.0.0": { - "associated_airflow_version": 
"2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.1.0": { @@ -1547,7 +1559,7 @@ "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -1613,11 +1625,11 @@ "date_released": "2020-12-09T21:48:29Z" }, "1.0.1": { - "associated_airflow_version": "2.0.1", + "associated_airflow_version": "2.1.0", "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -1719,15 +1731,15 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.0.2": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.0.0", "date_released": "2021-02-27T14:47:19Z" }, "1.0.3": { - "associated_airflow_version": "2.1.0", + "associated_airflow_version": "2.1.1", "date_released": "2021-05-01T09:04:42Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -1849,6 +1861,10 @@ "4.11.0": { "associated_airflow_version": "2.10.1", "date_released": "2024-09-24T13:49:56Z" + }, + "4.11.1": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:53Z" } }, "apprise": { @@ -1953,7 +1969,7 @@ }, "asana": { "1.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "1.1.0": { @@ -2089,7 +2105,7 @@ "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.1.0": { @@ -2207,6 +2223,10 @@ "3.8.2": { "associated_airflow_version": "2.10.1", "date_released": "2024-09-24T13:49:56Z" + }, + "3.8.3": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:53Z" } }, "cloudant": { @@ -2219,7 +2239,7 @@ "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -2285,6 +2305,10 @@ "4.0.0": { "associated_airflow_version": "2.10.1", "date_released": "2024-09-24T13:49:56Z" + }, + "4.0.1": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:53Z" } }, "cncf.kubernetes": { @@ -2301,23 +2325,23 @@ "date_released": "2021-02-27T14:47:19Z" }, "1.1.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.0.0", "date_released": "2021-04-06T23:45:20Z" }, "1.2.0": { - "associated_airflow_version": "2.1.0", + "associated_airflow_version": "2.1.1", "date_released": "2021-05-01T09:04:42Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.3", "date_released": "2021-07-26T20:06:28Z" }, "2.0.2": { - "associated_airflow_version": "2.1.3", + "associated_airflow_version": "2.1.4", "date_released": "2021-08-27T18:18:45Z" }, "2.0.3": { @@ -2543,6 +2567,10 @@ "8.4.2": { "associated_airflow_version": "2.10.1", "date_released": "2024-09-24T13:49:56Z" + }, + "9.0.0": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:53Z" } }, "cohere": { @@ -2591,6 +2619,10 @@ "1.2.0": { "associated_airflow_version": "2.10.1", "date_released": 
"2024-08-22T10:37:57Z" + }, + "1.2.1": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:53Z" } }, "common.io": { @@ -2629,6 +2661,10 @@ "1.4.1": { "associated_airflow_version": "2.10.0", "date_released": "2024-09-24T13:49:56Z" + }, + "1.4.2": { + "associated_airflow_version": "2.10.0", + "date_released": "2024-10-14T07:10:53Z" } }, "common.sql": { @@ -2767,6 +2803,10 @@ "1.17.1": { "associated_airflow_version": "2.10.1", "date_released": "2024-10-01T09:05:14Z" + }, + "1.18.0": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:52Z" } }, "databricks": { @@ -2779,7 +2819,7 @@ "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -2937,6 +2977,10 @@ "6.10.0": { "associated_airflow_version": "2.10.1", "date_released": "2024-09-24T13:49:56Z" + }, + "6.11.0": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:52Z" } }, "datadog": { @@ -2949,7 +2993,7 @@ "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -3137,6 +3181,10 @@ "3.10.1": { "associated_airflow_version": "2.10.1", "date_released": "2024-09-24T13:49:56Z" + }, + "3.11.0": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:53Z" } }, "dingding": { @@ -3149,11 +3197,11 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.0.2": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-02-27T14:47:19Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -3219,7 +3267,7 @@ "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -3305,15 +3353,15 @@ "date_released": "2021-02-27T14:47:19Z" }, "1.1.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.0.0", "date_released": "2021-04-06T23:45:20Z" }, "1.2.0": { - "associated_airflow_version": "2.1.0", + "associated_airflow_version": "2.1.1", "date_released": "2021-05-01T09:04:42Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.1.0": { @@ -3487,11 +3535,11 @@ "date_released": "2021-02-27T14:47:19Z" }, "1.0.3": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.0.0", "date_released": "2021-03-16T23:19:47Z" }, "1.0.4": { - "associated_airflow_version": "2.1.0", + "associated_airflow_version": "2.1.1", "date_released": "2021-05-01T09:04:42Z" }, "2.0.1": { @@ -3637,6 +3685,10 @@ "5.5.1": { "associated_airflow_version": "2.10.1", "date_released": "2024-09-24T13:49:56Z" + }, + "5.5.2": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:53Z" } }, "exasol": { @@ -3649,11 +3701,11 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.1.1": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-02-27T14:47:19Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -3821,6 +3873,10 @@ "1.4.0": { 
"associated_airflow_version": "2.10.1", "date_released": "2024-09-24T13:49:56Z" + }, + "1.4.1": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:52Z" } }, "facebook": { @@ -3833,11 +3889,11 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.1.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-04-06T23:45:20Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -3923,11 +3979,11 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.1.0": { - "associated_airflow_version": "2.1.0", + "associated_airflow_version": "2.1.1", "date_released": "2021-05-01T09:04:42Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -4111,23 +4167,23 @@ "date_released": "2020-12-09T21:48:29Z" }, "2.0.0": { - "associated_airflow_version": "2.0.0", + "associated_airflow_version": "2.1.0", "date_released": "2021-02-04T09:11:00Z" }, "2.1.0": { - "associated_airflow_version": "2.0.0", + "associated_airflow_version": "2.1.0", "date_released": "2021-02-27T14:47:19Z" }, "2.2.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.0", "date_released": "2021-04-06T23:45:20Z" }, "3.0.0": { - "associated_airflow_version": "2.1.0", + "associated_airflow_version": "2.1.1", "date_released": "2021-05-01T09:04:42Z" }, "4.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "5.0.0": { @@ -4349,6 +4405,10 @@ "10.23.0": { "associated_airflow_version": "2.10.1", "date_released": "2024-09-24T13:49:56Z" + }, + "10.24.0": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:53Z" } }, "grpc": { @@ -4361,11 +4421,11 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.1.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-04-06T23:45:20Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -4443,11 +4503,11 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.0.2": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-04-06T23:45:20Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.1.0": { @@ -4561,11 +4621,11 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.1.1": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-02-27T14:47:19Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -4699,7 +4759,7 @@ "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -4871,7 +4931,7 @@ "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -4965,6 +5025,10 @@ "4.5.1": { "associated_airflow_version": "2.10.1", "date_released": "2024-09-24T13:49:56Z" + }, + "4.5.2": { + "associated_airflow_version": "2.10.1", + 
"date_released": "2024-10-14T07:10:53Z" } }, "jenkins": { @@ -4977,11 +5041,11 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.1.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-02-27T14:47:19Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -5087,15 +5151,15 @@ "date_released": "2021-03-08T19:27:03Z" }, "1.3.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.0.0", "date_released": "2021-04-06T23:45:20Z" }, "2.0.0": { - "associated_airflow_version": "2.1.0", + "associated_airflow_version": "2.1.1", "date_released": "2021-05-01T09:04:42Z" }, "3.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "3.1.0": { @@ -5305,6 +5369,10 @@ "10.5.0": { "associated_airflow_version": "2.10.1", "date_released": "2024-09-24T13:49:56Z" + }, + "10.5.1": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:52Z" } }, "microsoft.mssql": { @@ -5317,11 +5385,11 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.1.0": { - "associated_airflow_version": "2.1.0", + "associated_airflow_version": "2.1.1", "date_released": "2021-05-01T09:04:42Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -5513,15 +5581,15 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.1.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.0.0", "date_released": "2021-04-06T23:45:20Z" }, "1.2.0": { - "associated_airflow_version": "2.1.0", + "associated_airflow_version": "2.1.1", "date_released": "2021-05-01T09:04:42Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -5599,7 +5667,7 @@ "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.1.0": { @@ -5705,11 +5773,11 @@ "date_released": "2021-02-27T14:47:19Z" }, "1.1.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-04-06T23:45:20Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.1.0": { @@ -5847,19 +5915,23 @@ "5.7.1": { "associated_airflow_version": "2.10.1", "date_released": "2024-09-24T13:49:56Z" + }, + "5.7.2": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:53Z" } }, "neo4j": { "1.0.0": { - "associated_airflow_version": "2.0.1", + "associated_airflow_version": "2.1.0", "date_released": "2021-02-04T09:11:00Z" }, "1.0.1": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-02-27T14:47:19Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -5949,7 +6021,7 @@ "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -6093,19 +6165,19 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.1.1": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": 
"2.1.1", "date_released": "2021-02-27T14:47:19Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2022-03-07T20:27:47Z" }, "2.0.2": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2022-03-14T22:17:57Z" }, "2.0.3": { @@ -6233,6 +6305,10 @@ "1.12.1": { "associated_airflow_version": "2.10.1", "date_released": "2024-10-01T09:05:14Z" + }, + "1.12.2": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:53Z" } }, "opensearch": { @@ -6267,6 +6343,10 @@ "1.4.0": { "associated_airflow_version": "2.10.1", "date_released": "2024-08-22T10:37:58Z" + }, + "1.5.0": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:53Z" } }, "opsgenie": { @@ -6279,11 +6359,11 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.0.2": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-04-06T23:45:20Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -6365,11 +6445,11 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.1.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-04-06T23:45:20Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -6495,7 +6575,7 @@ "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -6589,11 +6669,11 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.0.2": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-02-27T14:47:19Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -6747,15 +6827,15 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.0.2": { - "associated_airflow_version": "2.1.0", + "associated_airflow_version": "2.1.1", "date_released": "2021-05-01T09:04:42Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.1.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-07-26T20:06:28Z" }, "2.2.0": { @@ -6889,6 +6969,10 @@ "5.13.0": { "associated_airflow_version": "2.10.1", "date_released": "2024-09-24T13:49:56Z" + }, + "5.13.1": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:53Z" } }, "presto": { @@ -6901,11 +6985,11 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.0.2": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-02-27T14:47:19Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -7061,7 +7145,7 @@ "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -7151,11 +7235,11 @@ "date_released": "2021-02-04T09:11:00Z" 
}, "2.0.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-04-06T23:45:20Z" }, "3.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "3.1.0": { @@ -7277,7 +7361,7 @@ "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "3.0.0": { @@ -7359,7 +7443,7 @@ "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -7425,11 +7509,11 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.0.2": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-02-27T14:47:19Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -7499,15 +7583,15 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.1.1": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.0.0", "date_released": "2021-02-27T14:47:19Z" }, "1.2.0": { - "associated_airflow_version": "2.1.0", + "associated_airflow_version": "2.1.1", "date_released": "2021-05-01T09:04:42Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.1.0": { @@ -7661,11 +7745,11 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.1.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-04-06T23:45:20Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -7735,11 +7819,11 @@ "date_released": "2021-02-04T09:11:00Z" }, "3.0.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-02-27T14:47:19Z" }, "4.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "4.0.1": { @@ -7939,15 +8023,15 @@ "date_released": "2021-03-08T19:27:03Z" }, "1.2.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.0.0", "date_released": "2021-04-06T23:45:20Z" }, "1.3.0": { - "associated_airflow_version": "2.1.0", + "associated_airflow_version": "2.1.1", "date_released": "2021-05-01T09:04:42Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.1.0": { @@ -8129,6 +8213,10 @@ "5.7.1": { "associated_airflow_version": "2.10.1", "date_released": "2024-09-24T13:49:56Z" + }, + "5.8.0": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:53Z" } }, "sqlite": { @@ -8141,11 +8229,11 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.0.2": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-02-27T14:47:19Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -8259,11 +8347,11 @@ "date_released": "2021-02-27T14:47:19Z" }, "1.3.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-04-06T23:45:20Z" }, "2.0.0": { - 
"associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.1.0": { @@ -8393,11 +8481,11 @@ }, "tableau": { "1.0.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-02-27T14:47:19Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.1.0": { @@ -8511,11 +8599,11 @@ "date_released": "2021-02-04T09:11:00Z" }, "1.0.2": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-02-27T14:47:19Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -8627,11 +8715,11 @@ }, "trino": { "1.0.0": { - "associated_airflow_version": "2.0.2", + "associated_airflow_version": "2.1.1", "date_released": "2021-04-06T23:45:20Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -8769,6 +8857,10 @@ "5.8.0": { "associated_airflow_version": "2.10.1", "date_released": "2024-08-22T10:37:58Z" + }, + "5.8.1": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:53Z" } }, "vertica": { @@ -8781,7 +8873,7 @@ "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { @@ -8937,7 +9029,7 @@ "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.1.0": { @@ -9041,6 +9133,10 @@ "1.3.0": { "associated_airflow_version": "2.10.1", "date_released": "2024-08-22T10:37:59Z" + }, + "1.4.0": { + "associated_airflow_version": "2.10.1", + "date_released": "2024-10-14T07:10:53Z" } }, "zendesk": { @@ -9053,7 +9149,7 @@ "date_released": "2021-02-04T09:11:00Z" }, "2.0.0": { - "associated_airflow_version": "2.1.1", + "associated_airflow_version": "2.1.2", "date_released": "2021-06-23T12:50:28Z" }, "2.0.1": { From 5316e618e0953563b81875677a21b23749817173 Mon Sep 17 00:00:00 2001 From: Ephraim Anierobi Date: Mon, 14 Oct 2024 15:26:05 +0100 Subject: [PATCH 109/125] Update json schema pre-commit to have draft7 schema in file (#43005) We request this file each time the CI is running pre-commit and sometimes it fails with 403 error. 
This PR adds the draft7 schema file locally for use by pre-commit to avoid this error --- .pre-commit-config.yaml | 4 +- scripts/ci/pre_commit/draft7_schema.json | 172 +++++++++++++++++++++++ 2 files changed, 174 insertions(+), 2 deletions(-) create mode 100644 scripts/ci/pre_commit/draft7_schema.json diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d492747856a92..a2f42e65192bf 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -966,8 +966,8 @@ repos: name: Lint JSON Schema files entry: ./scripts/ci/pre_commit/json_schema.py args: - - --spec-url - - https://json-schema.org/draft-07/schema + - --spec-file + - scripts/ci/pre_commit/draft7_schema.json language: python pass_filenames: true files: .*\.schema\.json$ diff --git a/scripts/ci/pre_commit/draft7_schema.json b/scripts/ci/pre_commit/draft7_schema.json new file mode 100644 index 0000000000000..fb92c7f756b55 --- /dev/null +++ b/scripts/ci/pre_commit/draft7_schema.json @@ -0,0 +1,172 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "http://json-schema.org/draft-07/schema#", + "title": "Core schema meta-schema", + "definitions": { + "schemaArray": { + "type": "array", + "minItems": 1, + "items": { "$ref": "#" } + }, + "nonNegativeInteger": { + "type": "integer", + "minimum": 0 + }, + "nonNegativeIntegerDefault0": { + "allOf": [ + { "$ref": "#/definitions/nonNegativeInteger" }, + { "default": 0 } + ] + }, + "simpleTypes": { + "enum": [ + "array", + "boolean", + "integer", + "null", + "number", + "object", + "string" + ] + }, + "stringArray": { + "type": "array", + "items": { "type": "string" }, + "uniqueItems": true, + "default": [] + } + }, + "type": ["object", "boolean"], + "properties": { + "$id": { + "type": "string", + "format": "uri-reference" + }, + "$schema": { + "type": "string", + "format": "uri" + }, + "$ref": { + "type": "string", + "format": "uri-reference" + }, + "$comment": { + "type": "string" + }, + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "default": true, + "readOnly": { + "type": "boolean", + "default": false + }, + "writeOnly": { + "type": "boolean", + "default": false + }, + "examples": { + "type": "array", + "items": true + }, + "multipleOf": { + "type": "number", + "exclusiveMinimum": 0 + }, + "maximum": { + "type": "number" + }, + "exclusiveMaximum": { + "type": "number" + }, + "minimum": { + "type": "number" + }, + "exclusiveMinimum": { + "type": "number" + }, + "maxLength": { "$ref": "#/definitions/nonNegativeInteger" }, + "minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" }, + "pattern": { + "type": "string", + "format": "regex" + }, + "additionalItems": { "$ref": "#" }, + "items": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/schemaArray" } + ], + "default": true + }, + "maxItems": { "$ref": "#/definitions/nonNegativeInteger" }, + "minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" }, + "uniqueItems": { + "type": "boolean", + "default": false + }, + "contains": { "$ref": "#" }, + "maxProperties": { "$ref": "#/definitions/nonNegativeInteger" }, + "minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" }, + "required": { "$ref": "#/definitions/stringArray" }, + "additionalProperties": { "$ref": "#" }, + "definitions": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "properties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "patternProperties": { + "type": "object", + 
"additionalProperties": { "$ref": "#" }, + "propertyNames": { "format": "regex" }, + "default": {} + }, + "dependencies": { + "type": "object", + "additionalProperties": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/stringArray" } + ] + } + }, + "propertyNames": { "$ref": "#" }, + "const": true, + "enum": { + "type": "array", + "items": true, + "minItems": 1, + "uniqueItems": true + }, + "type": { + "anyOf": [ + { "$ref": "#/definitions/simpleTypes" }, + { + "type": "array", + "items": { "$ref": "#/definitions/simpleTypes" }, + "minItems": 1, + "uniqueItems": true + } + ] + }, + "format": { "type": "string" }, + "contentMediaType": { "type": "string" }, + "contentEncoding": { "type": "string" }, + "if": { "$ref": "#" }, + "then": { "$ref": "#" }, + "else": { "$ref": "#" }, + "allOf": { "$ref": "#/definitions/schemaArray" }, + "anyOf": { "$ref": "#/definitions/schemaArray" }, + "oneOf": { "$ref": "#/definitions/schemaArray" }, + "not": { "$ref": "#" } + }, + "default": true +} From b979debb511119ba4a0ee709e1b2809d4041dad3 Mon Sep 17 00:00:00 2001 From: Pierre Jeambrun Date: Mon, 14 Oct 2024 23:08:15 +0800 Subject: [PATCH 110/125] AIP-84 Patch Variable (#42929) --- .../endpoints/variable_endpoint.py | 1 + airflow/api_fastapi/openapi/v1-generated.yaml | 94 ++++++++++++++++++- airflow/api_fastapi/serializers/dags.py | 2 +- airflow/api_fastapi/serializers/variables.py | 17 +++- airflow/api_fastapi/views/public/variables.py | 28 +++++- airflow/ui/openapi-gen/queries/common.ts | 3 + airflow/ui/openapi-gen/queries/queries.ts | 49 +++++++++- .../ui/openapi-gen/requests/schemas.gen.ts | 41 +++++++- .../ui/openapi-gen/requests/services.gen.ts | 36 +++++++ airflow/ui/openapi-gen/requests/types.gen.ts | 50 +++++++++- .../views/public/test_variables.py | 88 +++++++++++++++++ 11 files changed, 394 insertions(+), 15 deletions(-) diff --git a/airflow/api_connexion/endpoints/variable_endpoint.py b/airflow/api_connexion/endpoints/variable_endpoint.py index 8efddb58419c4..b1d9e2f5c8b6f 100644 --- a/airflow/api_connexion/endpoints/variable_endpoint.py +++ b/airflow/api_connexion/endpoints/variable_endpoint.py @@ -95,6 +95,7 @@ def get_variables( ) +@mark_fastapi_migration_done @security.requires_access_variable("PUT") @provide_session @action_logging( diff --git a/airflow/api_fastapi/openapi/v1-generated.yaml b/airflow/api_fastapi/openapi/v1-generated.yaml index 6c35ca212a2a6..759ab7fdd8e9b 100644 --- a/airflow/api_fastapi/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/openapi/v1-generated.yaml @@ -629,6 +629,72 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + patch: + tags: + - Variable + summary: Patch Variable + description: Update a variable by key. 
+ operationId: patch_variable + parameters: + - name: variable_key + in: path + required: true + schema: + type: string + title: Variable Key + - name: update_mask + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Update Mask + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/VariableBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/VariableResponse' + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' /public/dags/{dag_id}/dagRuns/{dag_run_id}: get: tags: @@ -1045,7 +1111,7 @@ components: required: - is_paused title: DAGPatchBody - description: Dag Serializer for updatable body. + description: Dag Serializer for updatable bodies. DAGResponse: properties: dag_id: @@ -1492,25 +1558,47 @@ components: - msg - type title: ValidationError - VariableResponse: + VariableBody: properties: key: type: string title: Key + description: + anyOf: + - type: string + - type: 'null' + title: Description value: anyOf: - type: string - type: 'null' title: Value + type: object + required: + - key + - description + - value + title: VariableBody + description: Variable serializer for bodies. + VariableResponse: + properties: + key: + type: string + title: Key description: anyOf: - type: string - type: 'null' title: Description + value: + anyOf: + - type: string + - type: 'null' + title: Value type: object required: - key - - value - description + - value title: VariableResponse description: Variable serializer for responses. diff --git a/airflow/api_fastapi/serializers/dags.py b/airflow/api_fastapi/serializers/dags.py index 9879badf25048..c9d48aac222eb 100644 --- a/airflow/api_fastapi/serializers/dags.py +++ b/airflow/api_fastapi/serializers/dags.py @@ -95,7 +95,7 @@ def file_token(self) -> str: class DAGPatchBody(BaseModel): - """Dag Serializer for updatable body.""" + """Dag Serializer for updatable bodies.""" is_paused: bool diff --git a/airflow/api_fastapi/serializers/variables.py b/airflow/api_fastapi/serializers/variables.py index ded268432b89d..1ecc87425a24f 100644 --- a/airflow/api_fastapi/serializers/variables.py +++ b/airflow/api_fastapi/serializers/variables.py @@ -25,15 +25,20 @@ from airflow.utils.log.secrets_masker import redact -class VariableResponse(BaseModel): - """Variable serializer for responses.""" +class VariableBase(BaseModel): + """Base Variable serializer.""" model_config = ConfigDict(populate_by_name=True) key: str - val: str | None = Field(alias="value") description: str | None + +class VariableResponse(VariableBase): + """Variable serializer for responses.""" + + val: str | None = Field(alias="value") + @model_validator(mode="after") def redact_val(self) -> Self: if self.val is None: @@ -47,3 +52,9 @@ def redact_val(self) -> Self: # value is not a serialized string representation of a dict. 
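# For example, assuming the variable's key matches one of the sensitive-name
# patterns (e.g. "password"): a plain value is masked to "***", and a
# JSON-encoded value such as '{"password": "new_password"}' is masked
# per-field to '{"password": "***"}', which is the behaviour the new tests
# in this patch assert.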
self.val = redact(self.val, self.key) return self + + +class VariableBody(VariableBase): + """Variable serializer for bodies.""" + + value: str | None diff --git a/airflow/api_fastapi/views/public/variables.py b/airflow/api_fastapi/views/public/variables.py index e6cbb136f1cac..b4c07e23de293 100644 --- a/airflow/api_fastapi/views/public/variables.py +++ b/airflow/api_fastapi/views/public/variables.py @@ -16,14 +16,14 @@ # under the License. from __future__ import annotations -from fastapi import Depends, HTTPException +from fastapi import Depends, HTTPException, Query from sqlalchemy import select from sqlalchemy.orm import Session from typing_extensions import Annotated from airflow.api_fastapi.db.common import get_session from airflow.api_fastapi.openapi.exceptions import create_openapi_http_exception_doc -from airflow.api_fastapi.serializers.variables import VariableResponse +from airflow.api_fastapi.serializers.variables import VariableBody, VariableResponse from airflow.api_fastapi.views.router import AirflowRouter from airflow.models.variable import Variable @@ -56,3 +56,27 @@ async def get_variable( raise HTTPException(404, f"The Variable with key: `{variable_key}` was not found") return VariableResponse.model_validate(variable, from_attributes=True) + + +@variables_router.patch("/{variable_key}", responses=create_openapi_http_exception_doc([400, 401, 403, 404])) +async def patch_variable( + variable_key: str, + patch_body: VariableBody, + session: Annotated[Session, Depends(get_session)], + update_mask: list[str] | None = Query(None), +) -> VariableResponse: + """Update a variable by key.""" + if patch_body.key != variable_key: + raise HTTPException(400, "Invalid body, key from request body doesn't match uri parameter") + non_update_fields = {"key"} + variable = session.scalar(select(Variable).filter_by(key=variable_key).limit(1)) + if not variable: + raise HTTPException(404, f"The Variable with key: `{variable_key}` was not found") + if update_mask: + data = patch_body.dict(include=set(update_mask) - non_update_fields) + else: + data = patch_body.dict(exclude=non_update_fields) + for key, val in data.items(): + setattr(variable, key, val) + session.add(variable) + return variable diff --git a/airflow/ui/openapi-gen/queries/common.ts b/airflow/ui/openapi-gen/queries/common.ts index 393fad520a679..e3c0ef3ab4daf 100644 --- a/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow/ui/openapi-gen/queries/common.ts @@ -191,6 +191,9 @@ export type DagServicePatchDagsMutationResult = Awaited< export type DagServicePatchDagMutationResult = Awaited< ReturnType >; +export type VariableServicePatchVariableMutationResult = Awaited< + ReturnType +>; export type ConnectionServiceDeleteConnectionMutationResult = Awaited< ReturnType >; diff --git a/airflow/ui/openapi-gen/queries/queries.ts b/airflow/ui/openapi-gen/queries/queries.ts index 01e919cce53bf..b4c8cf9fea5c9 100644 --- a/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow/ui/openapi-gen/queries/queries.ts @@ -14,7 +14,7 @@ import { DashboardService, VariableService, } from "../requests/services.gen"; -import { DAGPatchBody, DagRunState } from "../requests/types.gen"; +import { DAGPatchBody, DagRunState, VariableBody } from "../requests/types.gen"; import * as Common from "./common"; /** @@ -428,6 +428,53 @@ export const useDagServicePatchDag = < }) as unknown as Promise, ...options, }); +/** + * Patch Variable + * Update a variable by key. + * @param data The data for the request. 
+ * @param data.variableKey + * @param data.requestBody + * @param data.updateMask + * @returns VariableResponse Successful Response + * @throws ApiError + */ +export const useVariableServicePatchVariable = < + TData = Common.VariableServicePatchVariableMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + requestBody: VariableBody; + updateMask?: string[]; + variableKey: string; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + requestBody: VariableBody; + updateMask?: string[]; + variableKey: string; + }, + TContext + >({ + mutationFn: ({ requestBody, updateMask, variableKey }) => + VariableService.patchVariable({ + requestBody, + updateMask, + variableKey, + }) as unknown as Promise, + ...options, + }); /** * Delete Connection * Delete a connection entry. diff --git a/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow/ui/openapi-gen/requests/schemas.gen.ts index 18df5284651b7..e42a3f6572ca9 100644 --- a/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -526,7 +526,7 @@ export const $DAGPatchBody = { type: "object", required: ["is_paused"], title: "DAGPatchBody", - description: "Dag Serializer for updatable body.", + description: "Dag Serializer for updatable bodies.", } as const; export const $DAGResponse = { @@ -1182,12 +1182,23 @@ export const $ValidationError = { title: "ValidationError", } as const; -export const $VariableResponse = { +export const $VariableBody = { properties: { key: { type: "string", title: "Key", }, + description: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Description", + }, value: { anyOf: [ { @@ -1199,6 +1210,19 @@ export const $VariableResponse = { ], title: "Value", }, + }, + type: "object", + required: ["key", "description", "value"], + title: "VariableBody", + description: "Variable serializer for bodies.", +} as const; + +export const $VariableResponse = { + properties: { + key: { + type: "string", + title: "Key", + }, description: { anyOf: [ { @@ -1210,9 +1234,20 @@ export const $VariableResponse = { ], title: "Description", }, + value: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Value", + }, }, type: "object", - required: ["key", "value", "description"], + required: ["key", "description", "value"], title: "VariableResponse", description: "Variable serializer for responses.", } as const; diff --git a/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow/ui/openapi-gen/requests/services.gen.ts index f0cbc099370f8..43d9e8d940df4 100644 --- a/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow/ui/openapi-gen/requests/services.gen.ts @@ -25,6 +25,8 @@ import type { DeleteVariableResponse, GetVariableData, GetVariableResponse, + PatchVariableData, + PatchVariableResponse, GetDagRunData, GetDagRunResponse, DeleteDagRunData, @@ -364,6 +366,40 @@ export class VariableService { }, }); } + + /** + * Patch Variable + * Update a variable by key. + * @param data The data for the request. 
+ * @param data.variableKey + * @param data.requestBody + * @param data.updateMask + * @returns VariableResponse Successful Response + * @throws ApiError + */ + public static patchVariable( + data: PatchVariableData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "PATCH", + url: "/public/variables/{variable_key}", + path: { + variable_key: data.variableKey, + }, + query: { + update_mask: data.updateMask, + }, + body: data.requestBody, + mediaType: "application/json", + errors: { + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } } export class DagRunService { diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index 5fe3615c7d4e6..3deba451fced4 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -79,7 +79,7 @@ export type DAGDetailsResponse = { }; /** - * Dag Serializer for updatable body. + * Dag Serializer for updatable bodies. */ export type DAGPatchBody = { is_paused: boolean; @@ -250,13 +250,22 @@ export type ValidationError = { type: string; }; +/** + * Variable serializer for bodies. + */ +export type VariableBody = { + key: string; + description: string | null; + value: string | null; +}; + /** * Variable serializer for responses. */ export type VariableResponse = { key: string; - value: string | null; description: string | null; + value: string | null; }; export type NextRunAssetsData = { @@ -348,6 +357,14 @@ export type GetVariableData = { export type GetVariableResponse = VariableResponse; +export type PatchVariableData = { + requestBody: VariableBody; + updateMask?: Array | null; + variableKey: string; +}; + +export type PatchVariableResponse = VariableResponse; + export type GetDagRunData = { dagId: string; dagRunId: string; @@ -637,6 +654,35 @@ export type $OpenApiTs = { 422: HTTPValidationError; }; }; + patch: { + req: PatchVariableData; + res: { + /** + * Successful Response + */ + 200: VariableResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; }; "/public/dags/{dag_id}/dagRuns/{dag_run_id}": { get: { diff --git a/tests/api_fastapi/views/public/test_variables.py b/tests/api_fastapi/views/public/test_variables.py index 7957d0bf22249..cf5c78fd562cf 100644 --- a/tests/api_fastapi/views/public/test_variables.py +++ b/tests/api_fastapi/views/public/test_variables.py @@ -135,3 +135,91 @@ def test_get_should_respond_404(self, test_client): assert response.status_code == 404 body = response.json() assert f"The Variable with key: `{TEST_VARIABLE_KEY}` was not found" == body["detail"] + + +class TestPatchVariable(TestVariableEndpoint): + @pytest.mark.enable_redact + @pytest.mark.parametrize( + "key, body, params, expected_response", + [ + ( + TEST_VARIABLE_KEY, + { + "key": TEST_VARIABLE_KEY, + "value": "The new value", + "description": "The new description", + }, + None, + { + "key": TEST_VARIABLE_KEY, + "value": "The new value", + "description": "The new description", + }, + ), + ( + TEST_VARIABLE_KEY, + { + "key": TEST_VARIABLE_KEY, + "value": "The new value", + "description": "The new description", + }, + {"update_mask": ["value"]}, + { + "key": TEST_VARIABLE_KEY, + "value": "The new value", + "description": 
TEST_VARIABLE_DESCRIPTION, + }, + ), + ( + TEST_VARIABLE_KEY2, + { + "key": TEST_VARIABLE_KEY2, + "value": "some_other_value", + "description": TEST_VARIABLE_DESCRIPTION2, + }, + None, + { + "key": TEST_VARIABLE_KEY2, + "value": "***", + "description": TEST_VARIABLE_DESCRIPTION2, + }, + ), + ( + TEST_VARIABLE_KEY3, + { + "key": TEST_VARIABLE_KEY3, + "value": '{"password": "new_password"}', + "description": "new description", + }, + {"update_mask": ["value", "description"]}, + { + "key": TEST_VARIABLE_KEY3, + "value": '{"password": "***"}', + "description": "new description", + }, + ), + ], + ) + def test_patch_should_respond_200(self, test_client, session, key, body, params, expected_response): + self.create_variable() + response = test_client.patch(f"/public/variables/{key}", json=body, params=params) + assert response.status_code == 200 + assert response.json() == expected_response + + def test_patch_should_respond_400(self, test_client): + response = test_client.patch( + f"/public/variables/{TEST_VARIABLE_KEY}", + json={"key": "different_key", "value": "some_value", "description": None}, + ) + assert response.status_code == 400 + body = response.json() + assert "Invalid body, key from request body doesn't match uri parameter" == body["detail"] + + def test_patch_should_respond_404(self, test_client): + response = test_client.patch( + f"/public/variables/{TEST_VARIABLE_KEY}", + json={"key": TEST_VARIABLE_KEY, "value": "some_value", "description": None}, + ) + assert response.status_code == 404 + body = response.json() + assert f"The Variable with key: `{TEST_VARIABLE_KEY}` was not found" == body["detail"] From ced319fe95a731b745801fe9b15ca7b24ef0e82f Mon Sep 17 00:00:00 2001 From: Ephraim Anierobi Date: Mon, 14 Oct 2024 17:00:07 +0100 Subject: [PATCH 111/125] Commit the session between writing and deletion of RTIF (#42928) * Use different sessions in writing and deletion of RTIF This is how it was previously done, but a later change made a single session handle both the writing and the deletion of RTIF, which we suspect caused StaleDataError. The related PR: https://github.com/apache/airflow/pull/38565 This PR brings back the old behaviour of using different sessions for writing/deleting RTIFs * fixup!
Use different sessions in writing and deletion of RTIF * add test and use flush --- airflow/models/taskinstance.py | 3 +- tests/models/test_renderedtifields.py | 50 ++++++++++++++++++++++++++- 2 files changed, 51 insertions(+), 2 deletions(-) diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index 5b51bb0d24df8..c1373e5d6a12d 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -1632,11 +1632,12 @@ def _get_previous_ti( @internal_api_call @provide_session -def _update_rtif(ti, rendered_fields, session: Session | None = None): +def _update_rtif(ti, rendered_fields, session: Session = NEW_SESSION): from airflow.models.renderedtifields import RenderedTaskInstanceFields rtif = RenderedTaskInstanceFields(ti=ti, render_templates=False, rendered_fields=rendered_fields) RenderedTaskInstanceFields.write(rtif, session=session) + session.flush() RenderedTaskInstanceFields.delete_old_records(ti.task_id, ti.dag_id, session=session) diff --git a/tests/models/test_renderedtifields.py b/tests/models/test_renderedtifields.py index ea22d31871db9..6ff87b28a89b6 100644 --- a/tests/models/test_renderedtifields.py +++ b/tests/models/test_renderedtifields.py @@ -24,13 +24,16 @@ from datetime import date, timedelta from unittest import mock +import pendulum import pytest +from sqlalchemy import select from airflow import settings from airflow.configuration import conf from airflow.decorators import task as task_decorator -from airflow.models import Variable +from airflow.models import DagRun, Variable from airflow.models.renderedtifields import RenderedTaskInstanceFields as RTIF +from airflow.operators.python import PythonOperator from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.task_instance_session import set_current_task_instance_session from airflow.utils.timezone import datetime @@ -386,3 +389,48 @@ def test_redact(self, redact, dag_maker): "env": "val 2", "cwd": "val 3", } + + @pytest.mark.skip_if_database_isolation_mode + def test_rtif_deletion_stale_data_error(self, dag_maker, session): + """ + Here we verify bad behavior. When we rerun a task whose RTIF + will get removed, we get a stale data error. + """ + with dag_maker(dag_id="test_retry_handling"): + task = PythonOperator( + task_id="test_retry_handling_op", + python_callable=lambda a, b: print(f"{a}\n{b}\n"), + op_args=[ + "dag {{dag.dag_id}};", + "try_number {{ti.try_number}};yo", + ], + ) + + def run_task(date): + run_id = f"abc_{date.to_date_string()}" + dr = session.scalar(select(DagRun).where(DagRun.execution_date == date, DagRun.run_id == run_id)) + if not dr: + dr = dag_maker.create_dagrun(execution_date=date, run_id=run_id) + ti = dr.task_instances[0] + ti.state = None + ti.try_number += 1 + session.commit() + ti.task = task + ti.run() + return dr + + base_date = pendulum.datetime(2021, 1, 1) + exec_dates = [base_date.add(days=x) for x in range(40)] + for date_ in exec_dates: + run_task(date=date_) + + session.commit() + session.expunge_all() + + # find oldest date + date = session.scalar( + select(DagRun.execution_date).join(RTIF.dag_run).order_by(DagRun.execution_date).limit(1) + ) + date = pendulum.instance(date) + # rerun the old date. 
this will fail + run_task(date=date) From 2e5607a44c7e56c8bc06ee79235af3fc8ba8a427 Mon Sep 17 00:00:00 2001 From: Kalyan R Date: Mon, 14 Oct 2024 22:28:28 +0530 Subject: [PATCH 112/125] Refactor FastApi Dag and DagRun endpoints tests (#42949) * wip - failing * update tests * update * update tests * update tests * fix * fix * fix mypy error * fix --- .../api_fastapi/views/public/test_dag_run.py | 52 +- tests/api_fastapi/views/public/test_dags.py | 477 +++++++++--------- 2 files changed, 270 insertions(+), 259 deletions(-) diff --git a/tests/api_fastapi/views/public/test_dag_run.py b/tests/api_fastapi/views/public/test_dag_run.py index f8d6780e50a3a..377aca4553d86 100644 --- a/tests/api_fastapi/views/public/test_dag_run.py +++ b/tests/api_fastapi/views/public/test_dag_run.py @@ -110,32 +110,32 @@ def setup(dag_maker, session=None): session.commit() -@pytest.mark.parametrize( - "dag_id, run_id, state, run_type, triggered_by, dag_run_note", - [ - (DAG1_ID, DAG1_RUN1_ID, DAG1_RUN1_STATE, DAG1_RUN1_RUN_TYPE, DAG1_RUN1_TRIGGERED_BY, DAG1_NOTE), - (DAG1_ID, DAG1_RUN2_ID, DAG1_RUN2_STATE, DAG1_RUN2_RUN_TYPE, DAG1_RUN2_TRIGGERED_BY, None), - (DAG2_ID, DAG2_RUN1_ID, DAG2_RUN1_STATE, DAG2_RUN1_RUN_TYPE, DAG2_RUN1_TRIGGERED_BY, None), - (DAG2_ID, DAG2_RUN2_ID, DAG2_RUN2_STATE, DAG2_RUN2_RUN_TYPE, DAG2_RUN2_TRIGGERED_BY, None), - ], -) -def test_get_dag_run(test_client, dag_id, run_id, state, run_type, triggered_by, dag_run_note): - response = test_client.get(f"/public/dags/{dag_id}/dagRuns/{run_id}") - assert response.status_code == 200 - body = response.json() - assert body["dag_id"] == dag_id - assert body["run_id"] == run_id - assert body["state"] == state - assert body["run_type"] == run_type - assert body["triggered_by"] == triggered_by.value - assert body["note"] == dag_run_note - - -def test_get_dag_run_not_found(test_client): - response = test_client.get(f"/public/dags/{DAG1_ID}/dagRuns/invalid") - assert response.status_code == 404 - body = response.json() - assert body["detail"] == "The DagRun with dag_id: `test_dag1` and run_id: `invalid` was not found" +class TestGetDagRun: + @pytest.mark.parametrize( + "dag_id, run_id, state, run_type, triggered_by, dag_run_note", + [ + (DAG1_ID, DAG1_RUN1_ID, DAG1_RUN1_STATE, DAG1_RUN1_RUN_TYPE, DAG1_RUN1_TRIGGERED_BY, DAG1_NOTE), + (DAG1_ID, DAG1_RUN2_ID, DAG1_RUN2_STATE, DAG1_RUN2_RUN_TYPE, DAG1_RUN2_TRIGGERED_BY, None), + (DAG2_ID, DAG2_RUN1_ID, DAG2_RUN1_STATE, DAG2_RUN1_RUN_TYPE, DAG2_RUN1_TRIGGERED_BY, None), + (DAG2_ID, DAG2_RUN2_ID, DAG2_RUN2_STATE, DAG2_RUN2_RUN_TYPE, DAG2_RUN2_TRIGGERED_BY, None), + ], + ) + def test_get_dag_run(self, test_client, dag_id, run_id, state, run_type, triggered_by, dag_run_note): + response = test_client.get(f"/public/dags/{dag_id}/dagRuns/{run_id}") + assert response.status_code == 200 + body = response.json() + assert body["dag_id"] == dag_id + assert body["run_id"] == run_id + assert body["state"] == state + assert body["run_type"] == run_type + assert body["triggered_by"] == triggered_by.value + assert body["note"] == dag_run_note + + def test_get_dag_run_not_found(self, test_client): + response = test_client.get(f"/public/dags/{DAG1_ID}/dagRuns/invalid") + assert response.status_code == 404 + body = response.json() + assert body["detail"] == "The DagRun with dag_id: `test_dag1` and run_id: `invalid` was not found" class TestDeleteDagRun: diff --git a/tests/api_fastapi/views/public/test_dags.py b/tests/api_fastapi/views/public/test_dags.py index 13520b37b3ff2..8bc10407e6287 100644 --- 
a/tests/api_fastapi/views/public/test_dags.py +++ b/tests/api_fastapi/views/public/test_dags.py @@ -114,238 +114,249 @@ def setup(dag_maker, session=None) -> None: _create_deactivated_paused_dag() -@pytest.mark.parametrize( - "query_params, expected_total_entries, expected_ids", - [ - # Filters - ({}, 2, [DAG1_ID, DAG2_ID]), - ({"limit": 1}, 2, [DAG1_ID]), - ({"offset": 1}, 2, [DAG2_ID]), - ({"tags": ["example"]}, 1, [DAG1_ID]), - ({"only_active": False}, 3, [DAG1_ID, DAG2_ID, DAG3_ID]), - ({"paused": True, "only_active": False}, 1, [DAG3_ID]), - ({"paused": False}, 2, [DAG1_ID, DAG2_ID]), - ({"owners": ["airflow"]}, 2, [DAG1_ID, DAG2_ID]), - ({"owners": ["test_owner"], "only_active": False}, 1, [DAG3_ID]), - ({"last_dag_run_state": "success", "only_active": False}, 1, [DAG3_ID]), - ({"last_dag_run_state": "failed", "only_active": False}, 1, [DAG1_ID]), - # # Sort - ({"order_by": "-dag_id"}, 2, [DAG2_ID, DAG1_ID]), - ({"order_by": "-dag_display_name"}, 2, [DAG2_ID, DAG1_ID]), - ({"order_by": "dag_display_name"}, 2, [DAG1_ID, DAG2_ID]), - ({"order_by": "next_dagrun", "only_active": False}, 3, [DAG3_ID, DAG1_ID, DAG2_ID]), - ({"order_by": "last_run_state", "only_active": False}, 3, [DAG1_ID, DAG3_ID, DAG2_ID]), - ({"order_by": "-last_run_state", "only_active": False}, 3, [DAG3_ID, DAG1_ID, DAG2_ID]), - ( - {"order_by": "last_run_start_date", "only_active": False}, - 3, - [DAG1_ID, DAG3_ID, DAG2_ID], - ), - ( - {"order_by": "-last_run_start_date", "only_active": False}, - 3, - [DAG3_ID, DAG1_ID, DAG2_ID], - ), - # Search - ({"dag_id_pattern": "1"}, 1, [DAG1_ID]), - ({"dag_display_name_pattern": "test_dag2"}, 1, [DAG2_ID]), - ], -) -def test_get_dags(test_client, query_params, expected_total_entries, expected_ids): - response = test_client.get("/public/dags", params=query_params) - - assert response.status_code == 200 - body = response.json() - - assert body["total_entries"] == expected_total_entries - assert [dag["dag_id"] for dag in body["dags"]] == expected_ids - - -@pytest.mark.parametrize( - "query_params, dag_id, body, expected_status_code, expected_is_paused", - [ - ({}, "fake_dag_id", {"is_paused": True}, 404, None), - ({"update_mask": ["field_1", "is_paused"]}, DAG1_ID, {"is_paused": True}, 400, None), - ({}, DAG1_ID, {"is_paused": True}, 200, True), - ({}, DAG1_ID, {"is_paused": False}, 200, False), - ({"update_mask": ["is_paused"]}, DAG1_ID, {"is_paused": True}, 200, True), - ({"update_mask": ["is_paused"]}, DAG1_ID, {"is_paused": False}, 200, False), - ], -) -def test_patch_dag(test_client, query_params, dag_id, body, expected_status_code, expected_is_paused): - response = test_client.patch(f"/public/dags/{dag_id}", json=body, params=query_params) - - assert response.status_code == expected_status_code - if expected_status_code == 200: - body = response.json() - assert body["is_paused"] == expected_is_paused - - -@pytest.mark.parametrize( - "query_params, body, expected_status_code, expected_ids, expected_paused_ids", - [ - ({"update_mask": ["field_1", "is_paused"]}, {"is_paused": True}, 400, None, None), - ( - {"only_active": False}, - {"is_paused": True}, - 200, - [], - [], - ), # no-op because the dag_id_pattern is not provided - ( - {"only_active": False, "dag_id_pattern": "~"}, - {"is_paused": True}, - 200, - [DAG1_ID, DAG2_ID, DAG3_ID], - [DAG1_ID, DAG2_ID, DAG3_ID], - ), - ( - {"only_active": False, "dag_id_pattern": "~"}, - {"is_paused": False}, - 200, - [DAG1_ID, DAG2_ID, DAG3_ID], - [], - ), - ( - {"dag_id_pattern": "~"}, - {"is_paused": True}, - 200, - [DAG1_ID, 
DAG2_ID], - [DAG1_ID, DAG2_ID], - ), - ( - {"dag_id_pattern": "dag1"}, - {"is_paused": True}, - 200, - [DAG1_ID], - [DAG1_ID], - ), - ], -) -def test_patch_dags(test_client, query_params, body, expected_status_code, expected_ids, expected_paused_ids): - response = test_client.patch("/public/dags", json=body, params=query_params) - - assert response.status_code == expected_status_code - if expected_status_code == 200: +class TestGetDags: + @pytest.mark.parametrize( + "query_params, expected_total_entries, expected_ids", + [ + # Filters + ({}, 2, [DAG1_ID, DAG2_ID]), + ({"limit": 1}, 2, [DAG1_ID]), + ({"offset": 1}, 2, [DAG2_ID]), + ({"tags": ["example"]}, 1, [DAG1_ID]), + ({"only_active": False}, 3, [DAG1_ID, DAG2_ID, DAG3_ID]), + ({"paused": True, "only_active": False}, 1, [DAG3_ID]), + ({"paused": False}, 2, [DAG1_ID, DAG2_ID]), + ({"owners": ["airflow"]}, 2, [DAG1_ID, DAG2_ID]), + ({"owners": ["test_owner"], "only_active": False}, 1, [DAG3_ID]), + ({"last_dag_run_state": "success", "only_active": False}, 1, [DAG3_ID]), + ({"last_dag_run_state": "failed", "only_active": False}, 1, [DAG1_ID]), + # # Sort + ({"order_by": "-dag_id"}, 2, [DAG2_ID, DAG1_ID]), + ({"order_by": "-dag_display_name"}, 2, [DAG2_ID, DAG1_ID]), + ({"order_by": "dag_display_name"}, 2, [DAG1_ID, DAG2_ID]), + ({"order_by": "next_dagrun", "only_active": False}, 3, [DAG3_ID, DAG1_ID, DAG2_ID]), + ({"order_by": "last_run_state", "only_active": False}, 3, [DAG1_ID, DAG3_ID, DAG2_ID]), + ({"order_by": "-last_run_state", "only_active": False}, 3, [DAG3_ID, DAG1_ID, DAG2_ID]), + ( + {"order_by": "last_run_start_date", "only_active": False}, + 3, + [DAG1_ID, DAG3_ID, DAG2_ID], + ), + ( + {"order_by": "-last_run_start_date", "only_active": False}, + 3, + [DAG3_ID, DAG1_ID, DAG2_ID], + ), + # Search + ({"dag_id_pattern": "1"}, 1, [DAG1_ID]), + ({"dag_display_name_pattern": "test_dag2"}, 1, [DAG2_ID]), + ], + ) + def test_get_dags(self, test_client, query_params, expected_total_entries, expected_ids): + response = test_client.get("/public/dags", params=query_params) + + assert response.status_code == 200 body = response.json() + + assert body["total_entries"] == expected_total_entries assert [dag["dag_id"] for dag in body["dags"]] == expected_ids - paused_dag_ids = [dag["dag_id"] for dag in body["dags"] if dag["is_paused"]] - assert paused_dag_ids == expected_paused_ids - - -@pytest.mark.parametrize( - "query_params, dag_id, expected_status_code, dag_display_name, start_date", - [ - ({}, "fake_dag_id", 404, "fake_dag", datetime(2023, 12, 31, tzinfo=timezone.utc)), - ({}, DAG2_ID, 200, DAG2_ID, DAG2_START_DATE), - ], -) -def test_dag_details(test_client, query_params, dag_id, expected_status_code, dag_display_name, start_date): - response = test_client.get(f"/public/dags/{dag_id}/details", params=query_params) - assert response.status_code == expected_status_code - if expected_status_code != 200: - return - - # Match expected and actual responses below. 
- res_json = response.json() - last_parsed = res_json["last_parsed"] - last_parsed_time = res_json["last_parsed_time"] - file_token = res_json["file_token"] - expected = { - "catchup": True, - "concurrency": 16, - "dag_id": dag_id, - "dag_display_name": dag_display_name, - "dag_run_timeout": None, - "dataset_expression": None, - "default_view": "grid", - "description": None, - "doc_md": "details", - "end_date": None, - "fileloc": "/opt/airflow/tests/api_fastapi/views/public/test_dags.py", - "file_token": file_token, - "has_import_errors": False, - "has_task_concurrency_limits": True, - "is_active": True, - "is_paused": False, - "is_paused_upon_creation": None, - "last_expired": None, - "last_parsed": last_parsed, - "last_parsed_time": last_parsed_time, - "last_pickled": None, - "max_active_runs": 16, - "max_active_tasks": 16, - "max_consecutive_failed_dag_runs": 0, - "next_dagrun": None, - "next_dagrun_create_after": None, - "next_dagrun_data_interval_end": None, - "next_dagrun_data_interval_start": None, - "orientation": "LR", - "owners": ["airflow"], - "params": { - "foo": { - "__class": "airflow.models.param.Param", - "description": None, - "schema": {}, - "value": 1, - } - }, - "pickle_id": None, - "render_template_as_native_obj": False, - "timetable_summary": None, - "scheduler_lock": None, - "start_date": start_date.replace(tzinfo=None).isoformat() + "Z", # pydantic datetime format - "tags": [], - "template_search_path": None, - "timetable_description": "Never, external triggers only", - "timezone": UTC_JSON_REPR, - } - assert res_json == expected - - -@pytest.mark.parametrize( - "query_params, dag_id, expected_status_code, dag_display_name", - [ - ({}, "fake_dag_id", 404, "fake_dag"), - ({}, DAG2_ID, 200, DAG2_ID), - ], -) -def test_get_dag(test_client, query_params, dag_id, expected_status_code, dag_display_name): - response = test_client.get(f"/public/dags/{dag_id}", params=query_params) - assert response.status_code == expected_status_code - if expected_status_code != 200: - return - - # Match expected and actual responses below. 
- res_json = response.json() - last_parsed_time = res_json["last_parsed_time"] - file_token = res_json["file_token"] - expected = { - "dag_id": dag_id, - "dag_display_name": dag_display_name, - "description": None, - "fileloc": "/opt/airflow/tests/api_fastapi/views/public/test_dags.py", - "file_token": file_token, - "is_paused": False, - "is_active": True, - "owners": ["airflow"], - "timetable_summary": None, - "tags": [], - "next_dagrun": None, - "has_task_concurrency_limits": True, - "next_dagrun_data_interval_start": None, - "next_dagrun_data_interval_end": None, - "max_active_runs": 16, - "max_consecutive_failed_dag_runs": 0, - "next_dagrun_create_after": None, - "last_expired": None, - "max_active_tasks": 16, - "last_pickled": None, - "default_view": "grid", - "last_parsed_time": last_parsed_time, - "scheduler_lock": None, - "timetable_description": "Never, external triggers only", - "has_import_errors": False, - "pickle_id": None, - } - assert res_json == expected + + +class TestPatchDag: + @pytest.mark.parametrize( + "query_params, dag_id, body, expected_status_code, expected_is_paused", + [ + ({}, "fake_dag_id", {"is_paused": True}, 404, None), + ({"update_mask": ["field_1", "is_paused"]}, DAG1_ID, {"is_paused": True}, 400, None), + ({}, DAG1_ID, {"is_paused": True}, 200, True), + ({}, DAG1_ID, {"is_paused": False}, 200, False), + ({"update_mask": ["is_paused"]}, DAG1_ID, {"is_paused": True}, 200, True), + ({"update_mask": ["is_paused"]}, DAG1_ID, {"is_paused": False}, 200, False), + ], + ) + def test_patch_dag( + self, test_client, query_params, dag_id, body, expected_status_code, expected_is_paused + ): + response = test_client.patch(f"/public/dags/{dag_id}", json=body, params=query_params) + + assert response.status_code == expected_status_code + if expected_status_code == 200: + body = response.json() + assert body["is_paused"] == expected_is_paused + + +class TestPatchDags: + @pytest.mark.parametrize( + "query_params, body, expected_status_code, expected_ids, expected_paused_ids", + [ + ({"update_mask": ["field_1", "is_paused"]}, {"is_paused": True}, 400, None, None), + ( + {"only_active": False}, + {"is_paused": True}, + 200, + [], + [], + ), # no-op because the dag_id_pattern is not provided + ( + {"only_active": False, "dag_id_pattern": "~"}, + {"is_paused": True}, + 200, + [DAG1_ID, DAG2_ID, DAG3_ID], + [DAG1_ID, DAG2_ID, DAG3_ID], + ), + ( + {"only_active": False, "dag_id_pattern": "~"}, + {"is_paused": False}, + 200, + [DAG1_ID, DAG2_ID, DAG3_ID], + [], + ), + ( + {"dag_id_pattern": "~"}, + {"is_paused": True}, + 200, + [DAG1_ID, DAG2_ID], + [DAG1_ID, DAG2_ID], + ), + ( + {"dag_id_pattern": "dag1"}, + {"is_paused": True}, + 200, + [DAG1_ID], + [DAG1_ID], + ), + ], + ) + def test_patch_dags( + self, test_client, query_params, body, expected_status_code, expected_ids, expected_paused_ids + ): + response = test_client.patch("/public/dags", json=body, params=query_params) + + assert response.status_code == expected_status_code + if expected_status_code == 200: + body = response.json() + assert [dag["dag_id"] for dag in body["dags"]] == expected_ids + paused_dag_ids = [dag["dag_id"] for dag in body["dags"] if dag["is_paused"]] + assert paused_dag_ids == expected_paused_ids + + +class TestDagDetails: + @pytest.mark.parametrize( + "query_params, dag_id, expected_status_code, dag_display_name, start_date", + [ + ({}, "fake_dag_id", 404, "fake_dag", datetime(2023, 12, 31, tzinfo=timezone.utc)), + ({}, DAG2_ID, 200, DAG2_ID, DAG2_START_DATE), + ], + ) + def test_dag_details( + 
self, test_client, query_params, dag_id, expected_status_code, dag_display_name, start_date + ): + response = test_client.get(f"/public/dags/{dag_id}/details", params=query_params) + assert response.status_code == expected_status_code + if expected_status_code != 200: + return + + # Match expected and actual responses below. + res_json = response.json() + last_parsed = res_json["last_parsed"] + last_parsed_time = res_json["last_parsed_time"] + file_token = res_json["file_token"] + expected = { + "catchup": True, + "concurrency": 16, + "dag_id": dag_id, + "dag_display_name": dag_display_name, + "dag_run_timeout": None, + "dataset_expression": None, + "default_view": "grid", + "description": None, + "doc_md": "details", + "end_date": None, + "fileloc": "/opt/airflow/tests/api_fastapi/views/public/test_dags.py", + "file_token": file_token, + "has_import_errors": False, + "has_task_concurrency_limits": True, + "is_active": True, + "is_paused": False, + "is_paused_upon_creation": None, + "last_expired": None, + "last_parsed": last_parsed, + "last_parsed_time": last_parsed_time, + "last_pickled": None, + "max_active_runs": 16, + "max_active_tasks": 16, + "max_consecutive_failed_dag_runs": 0, + "next_dagrun": None, + "next_dagrun_create_after": None, + "next_dagrun_data_interval_end": None, + "next_dagrun_data_interval_start": None, + "orientation": "LR", + "owners": ["airflow"], + "params": { + "foo": { + "__class": "airflow.models.param.Param", + "description": None, + "schema": {}, + "value": 1, + } + }, + "pickle_id": None, + "render_template_as_native_obj": False, + "timetable_summary": None, + "scheduler_lock": None, + "start_date": start_date.replace(tzinfo=None).isoformat() + "Z", # pydantic datetime format + "tags": [], + "template_search_path": None, + "timetable_description": "Never, external triggers only", + "timezone": UTC_JSON_REPR, + } + assert res_json == expected + + +class TestGetDag: + @pytest.mark.parametrize( + "query_params, dag_id, expected_status_code, dag_display_name", + [ + ({}, "fake_dag_id", 404, "fake_dag"), + ({}, DAG2_ID, 200, DAG2_ID), + ], + ) + def test_get_dag(self, test_client, query_params, dag_id, expected_status_code, dag_display_name): + response = test_client.get(f"/public/dags/{dag_id}", params=query_params) + assert response.status_code == expected_status_code + if expected_status_code != 200: + return + + # Match expected and actual responses below. 
+        res_json = response.json()
+        last_parsed_time = res_json["last_parsed_time"]
+        file_token = res_json["file_token"]
+        expected = {
+            "dag_id": dag_id,
+            "dag_display_name": dag_display_name,
+            "description": None,
+            "fileloc": "/opt/airflow/tests/api_fastapi/views/public/test_dags.py",
+            "file_token": file_token,
+            "is_paused": False,
+            "is_active": True,
+            "owners": ["airflow"],
+            "timetable_summary": None,
+            "tags": [],
+            "next_dagrun": None,
+            "has_task_concurrency_limits": True,
+            "next_dagrun_data_interval_start": None,
+            "next_dagrun_data_interval_end": None,
+            "max_active_runs": 16,
+            "max_consecutive_failed_dag_runs": 0,
+            "next_dagrun_create_after": None,
+            "last_expired": None,
+            "max_active_tasks": 16,
+            "last_pickled": None,
+            "default_view": "grid",
+            "last_parsed_time": last_parsed_time,
+            "scheduler_lock": None,
+            "timetable_description": "Never, external triggers only",
+            "has_import_errors": False,
+            "pickle_id": None,
+        }
+        assert res_json == expected

From 41225973475d0ead84cbac5526b264420f9fe6a4 Mon Sep 17 00:00:00 2001
From: Kaxil Naik
Date: Mon, 14 Oct 2024 18:10:12 +0100
Subject: [PATCH 113/125] Minor updates in UI contributing docs (#43013)

---
 contributing-docs/14_node_environment_setup.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/contributing-docs/14_node_environment_setup.rst b/contributing-docs/14_node_environment_setup.rst
index 81ced88240ac5..25635a89765e1 100644
--- a/contributing-docs/14_node_environment_setup.rst
+++ b/contributing-docs/14_node_environment_setup.rst
@@ -36,8 +36,8 @@ Committers will exercise their judgement on what endpoints should exist in the p
 Airflow UI
 ----------
 
-``airflow/ui`` is our React frontend powered. Dependencies are managed by pnpm and dev/build processes by `Vite `__
-Make sure you are using recent versions of ``pnpm\>=9`` and ``node/>=20``. ``breeze start-airflow`` will build the UI automatically.
+``airflow/ui`` is our React frontend. Dependencies are managed by pnpm and dev/build processes by `Vite `__.
+Make sure you are using recent versions of ``pnpm>=9`` and ``node>=20``. ``breeze start-airflow`` will build the UI automatically.
 Adding the ``--dev-mode`` flag will automatically run the vite dev server for hot reloading the UI
 during local development.
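As quick orientation before the ``pnpm commands`` reference that follows, the day-to-day loop this paragraph describes is the standard pnpm/Vite one; a minimal sketch (the ``dev`` script name assumes the usual Vite setup rather than anything defined in this patch):

    cd airflow/ui
    pnpm install   # resolve dependencies from the pnpm lockfile
    pnpm dev       # start the Vite dev server with hot reloading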
pnpm commands From 4c5ad9c846262b8f5fe64669fcb36a60e3d330ae Mon Sep 17 00:00:00 2001 From: Pierre Jeambrun Date: Tue, 15 Oct 2024 03:26:35 +0800 Subject: [PATCH 114/125] AIP-84 post variable (#42948) --- .../endpoints/variable_endpoint.py | 1 + airflow/api_fastapi/openapi/v1-generated.yaml | 38 ++++++++++++++ airflow/api_fastapi/views/public/variables.py | 17 ++++++- airflow/ui/openapi-gen/queries/common.ts | 3 ++ airflow/ui/openapi-gen/queries/queries.ts | 39 +++++++++++++++ .../ui/openapi-gen/requests/services.gen.ts | 26 ++++++++++ airflow/ui/openapi-gen/requests/types.gen.ts | 29 +++++++++++ .../views/public/test_variables.py | 50 +++++++++++++++++++ 8 files changed, 201 insertions(+), 2 deletions(-) diff --git a/airflow/api_connexion/endpoints/variable_endpoint.py b/airflow/api_connexion/endpoints/variable_endpoint.py index b1d9e2f5c8b6f..20e7ce1edeabe 100644 --- a/airflow/api_connexion/endpoints/variable_endpoint.py +++ b/airflow/api_connexion/endpoints/variable_endpoint.py @@ -130,6 +130,7 @@ def patch_variable( return variable_schema.dump(variable) +@mark_fastapi_migration_done @security.requires_access_variable("POST") @action_logging( event=action_event_from_permission( diff --git a/airflow/api_fastapi/openapi/v1-generated.yaml b/airflow/api_fastapi/openapi/v1-generated.yaml index 759ab7fdd8e9b..235410a6d347a 100644 --- a/airflow/api_fastapi/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/openapi/v1-generated.yaml @@ -695,6 +695,44 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + /public/variables/: + post: + tags: + - Variable + summary: Post Variable + description: Create a variable. + operationId: post_variable + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/VariableBody' + required: true + responses: + '201': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/VariableResponse' + '401': + description: Unauthorized + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + '403': + description: Forbidden + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' /public/dags/{dag_id}/dagRuns/{dag_run_id}: get: tags: diff --git a/airflow/api_fastapi/views/public/variables.py b/airflow/api_fastapi/views/public/variables.py index b4c07e23de293..a61b9bb930023 100644 --- a/airflow/api_fastapi/views/public/variables.py +++ b/airflow/api_fastapi/views/public/variables.py @@ -73,10 +73,23 @@ async def patch_variable( if not variable: raise HTTPException(404, f"The Variable with key: `{variable_key}` was not found") if update_mask: - data = patch_body.dict(include=set(update_mask) - non_update_fields) + data = patch_body.model_dump(include=set(update_mask) - non_update_fields) else: - data = patch_body.dict(exclude=non_update_fields) + data = patch_body.model_dump(exclude=non_update_fields) for key, val in data.items(): setattr(variable, key, val) session.add(variable) return variable + + +@variables_router.post("/", status_code=201, responses=create_openapi_http_exception_doc([401, 403])) +async def post_variable( + post_body: VariableBody, + session: Annotated[Session, Depends(get_session)], +) -> VariableResponse: + """Create a variable.""" + Variable.set(**post_body.model_dump(), session=session) + + variable = 
session.scalar(select(Variable).where(Variable.key == post_body.key).limit(1)) + + return VariableResponse.model_validate(variable, from_attributes=True) diff --git a/airflow/ui/openapi-gen/queries/common.ts b/airflow/ui/openapi-gen/queries/common.ts index e3c0ef3ab4daf..426e28447fd9c 100644 --- a/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow/ui/openapi-gen/queries/common.ts @@ -185,6 +185,9 @@ export const UseDagRunServiceGetDagRunKeyFn = ( }, queryKey?: Array, ) => [useDagRunServiceGetDagRunKey, ...(queryKey ?? [{ dagId, dagRunId }])]; +export type VariableServicePostVariableMutationResult = Awaited< + ReturnType +>; export type DagServicePatchDagsMutationResult = Awaited< ReturnType >; diff --git a/airflow/ui/openapi-gen/queries/queries.ts b/airflow/ui/openapi-gen/queries/queries.ts index b4c8cf9fea5c9..557a7ba8ffa17 100644 --- a/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow/ui/openapi-gen/queries/queries.ts @@ -295,6 +295,45 @@ export const useDagRunServiceGetDagRun = < queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) as TData, ...options, }); +/** + * Post Variable + * Create a variable. + * @param data The data for the request. + * @param data.requestBody + * @returns VariableResponse Successful Response + * @throws ApiError + */ +export const useVariableServicePostVariable = < + TData = Common.VariableServicePostVariableMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + requestBody: VariableBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + requestBody: VariableBody; + }, + TContext + >({ + mutationFn: ({ requestBody }) => + VariableService.postVariable({ + requestBody, + }) as unknown as Promise, + ...options, + }); /** * Patch Dags * Patch multiple DAGs. diff --git a/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow/ui/openapi-gen/requests/services.gen.ts index 43d9e8d940df4..78b113c7f2f80 100644 --- a/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow/ui/openapi-gen/requests/services.gen.ts @@ -27,6 +27,8 @@ import type { GetVariableResponse, PatchVariableData, PatchVariableResponse, + PostVariableData, + PostVariableResponse, GetDagRunData, GetDagRunResponse, DeleteDagRunData, @@ -400,6 +402,30 @@ export class VariableService { }, }); } + + /** + * Post Variable + * Create a variable. + * @param data The data for the request. 
+ * @param data.requestBody + * @returns VariableResponse Successful Response + * @throws ApiError + */ + public static postVariable( + data: PostVariableData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "POST", + url: "/public/variables/", + body: data.requestBody, + mediaType: "application/json", + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 422: "Validation Error", + }, + }); + } } export class DagRunService { diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index 3deba451fced4..856517d560fe4 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -365,6 +365,12 @@ export type PatchVariableData = { export type PatchVariableResponse = VariableResponse; +export type PostVariableData = { + requestBody: VariableBody; +}; + +export type PostVariableResponse = VariableResponse; + export type GetDagRunData = { dagId: string; dagRunId: string; @@ -684,6 +690,29 @@ export type $OpenApiTs = { }; }; }; + "/public/variables/": { + post: { + req: PostVariableData; + res: { + /** + * Successful Response + */ + 201: VariableResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; "/public/dags/{dag_id}/dagRuns/{dag_run_id}": { get: { req: GetDagRunData; diff --git a/tests/api_fastapi/views/public/test_variables.py b/tests/api_fastapi/views/public/test_variables.py index cf5c78fd562cf..58a09538a971a 100644 --- a/tests/api_fastapi/views/public/test_variables.py +++ b/tests/api_fastapi/views/public/test_variables.py @@ -223,3 +223,53 @@ def test_patch_should_respond_404(self, test_client): assert response.status_code == 404 body = response.json() assert f"The Variable with key: `{TEST_VARIABLE_KEY}` was not found" == body["detail"] + + +class TestPostVariable(TestVariableEndpoint): + @pytest.mark.enable_redact + @pytest.mark.parametrize( + "body, expected_response", + [ + ( + { + "key": "new variable key", + "value": "new variable value", + "description": "new variable description", + }, + { + "key": "new variable key", + "value": "new variable value", + "description": "new variable description", + }, + ), + ( + { + "key": "another_password", + "value": "password_value", + "description": "another password", + }, + { + "key": "another_password", + "value": "***", + "description": "another password", + }, + ), + ( + { + "key": "another value with sensitive information", + "value": '{"password": "new_password"}', + "description": "some description", + }, + { + "key": "another value with sensitive information", + "value": '{"password": "***"}', + "description": "some description", + }, + ), + ], + ) + def test_post_should_respond_201(self, test_client, session, body, expected_response): + self.create_variable() + response = test_client.post("/public/variables/", json=body) + assert response.status_code == 201 + assert response.json() == expected_response From 2b101e2377f8d49a46aca6c219e4b38ee099a98d Mon Sep 17 00:00:00 2001 From: David Blain Date: Tue, 15 Oct 2024 02:41:36 +0200 Subject: [PATCH 115/125] Feature: Added event_handler parameter in MSGraphAsyncOperator (#42539) * refactor: Added parameter in MSGraphAsyncOperator to allow overriding default event_handler * docs: Added docstring for event_handler parameter in MSGraphAsyncOperator * refactor: Fixed TestMSGraphAsyncOperator * refactor: Check if event is not None * 
refactor: Register the TextParseNodeFactory and JsonParseNodeFactory so error messages get handled correctly in RequestAdapter * refactor: Reorganized import for TestMSGraphAsyncOperator * refactor: Added missing kiota-serialization packages in azure provider * refactor: Updated provider dependencies * refactor: Reorganized import of TestKiotaRequestAdapterHook * refactor: Downgraded version of json kiota serialization * refactor: Updated provider dependencies * refactor: Put import of Context in TYPE_CHECKING block * refactor: Fixed lookup of tenant-id * refactor: Fixed kiota serialization dependencies to 1.0.0 to avoid pendulum dependency issues for backward compatibility * refactor: Updated provider dependencies * refactored: Fixed import of test_utils in test_dag_run --------- Co-authored-by: David Blain --- generated/provider_dependencies.json | 2 + .../microsoft/azure/hooks/msgraph.py | 7 +++ .../microsoft/azure/operators/msgraph.py | 18 ++++++-- .../providers/microsoft/azure/provider.yaml | 2 + .../microsoft/azure/hooks/test_msgraph.py | 45 ++++++++++++++++++- .../microsoft/azure/operators/test_msgraph.py | 30 +++++++++++++ 6 files changed, 99 insertions(+), 5 deletions(-) diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json index 4d921fc1fb911..8efdd5eae78ad 100644 --- a/generated/provider_dependencies.json +++ b/generated/provider_dependencies.json @@ -828,6 +828,8 @@ "azure-synapse-artifacts>=0.17.0", "azure-synapse-spark>=0.2.0", "microsoft-kiota-http>=1.3.0,!=1.3.4", + "microsoft-kiota-serialization-json==1.0.0", + "microsoft-kiota-serialization-text==1.0.0", "msgraph-core>=1.0.0" ], "devel-deps": [ diff --git a/providers/src/airflow/providers/microsoft/azure/hooks/msgraph.py b/providers/src/airflow/providers/microsoft/azure/hooks/msgraph.py index 61e555f4caa78..4ab3aaf3ba37f 100644 --- a/providers/src/airflow/providers/microsoft/azure/hooks/msgraph.py +++ b/providers/src/airflow/providers/microsoft/azure/hooks/msgraph.py @@ -32,11 +32,14 @@ from kiota_abstractions.method import Method from kiota_abstractions.request_information import RequestInformation from kiota_abstractions.response_handler import ResponseHandler +from kiota_abstractions.serialization import ParseNodeFactoryRegistry from kiota_authentication_azure.azure_identity_authentication_provider import ( AzureIdentityAuthenticationProvider, ) from kiota_http.httpx_request_adapter import HttpxRequestAdapter from kiota_http.middleware.options import ResponseHandlerOption +from kiota_serialization_json.json_parse_node_factory import JsonParseNodeFactory +from kiota_serialization_text.text_parse_node_factory import TextParseNodeFactory from msgraph_core import APIVersion, GraphClientFactory from msgraph_core._enums import NationalClouds @@ -249,8 +252,12 @@ def get_conn(self) -> RequestAdapter: scopes=scopes, allowed_hosts=allowed_hosts, ) + parse_node_factory = ParseNodeFactoryRegistry() + parse_node_factory.CONTENT_TYPE_ASSOCIATED_FACTORIES["text/plain"] = TextParseNodeFactory() + parse_node_factory.CONTENT_TYPE_ASSOCIATED_FACTORIES["application/json"] = JsonParseNodeFactory() request_adapter = HttpxRequestAdapter( authentication_provider=auth_provider, + parse_node_factory=parse_node_factory, http_client=http_client, base_url=base_url, ) diff --git a/providers/src/airflow/providers/microsoft/azure/operators/msgraph.py b/providers/src/airflow/providers/microsoft/azure/operators/msgraph.py index b3d14b14a57ec..0d187ebd5144b 100644 --- 
a/providers/src/airflow/providers/microsoft/azure/operators/msgraph.py +++ b/providers/src/airflow/providers/microsoft/azure/operators/msgraph.py @@ -44,6 +44,14 @@ from airflow.utils.context import Context +def default_event_handler(context: Context, event: dict[Any, Any] | None = None) -> Any: + if event: + if event.get("status") == "failure": + raise AirflowException(event.get("message")) + + return event.get("response") + + class MSGraphAsyncOperator(BaseOperator): """ A Microsoft Graph API operator which allows you to execute REST call to the Microsoft Graph API. @@ -69,6 +77,9 @@ class MSGraphAsyncOperator(BaseOperator): :param result_processor: Function to further process the response from MS Graph API (default is lambda: context, response: response). When the response returned by the `KiotaRequestAdapterHook` are bytes, then those will be base64 encoded into a string. + :param event_handler: Function to process the event returned from `MSGraphTrigger`. By default, when the + event returned by the `MSGraphTrigger` has a failed status, an AirflowException is being raised with + the message from the event, otherwise the response from the event payload is returned. :param serializer: Class which handles response serialization (default is ResponseSerializer). Bytes will be base64 encoded into a string, so it can be stored as an XCom. """ @@ -102,6 +113,7 @@ def __init__( api_version: APIVersion | str | None = None, pagination_function: Callable[[MSGraphAsyncOperator, dict, Context], tuple[str, dict]] | None = None, result_processor: Callable[[Context, Any], Any] = lambda context, result: result, + event_handler: Callable[[Context, dict[Any, Any] | None], Any] | None = None, serializer: type[ResponseSerializer] = ResponseSerializer, **kwargs: Any, ): @@ -121,6 +133,7 @@ def __init__( self.api_version = api_version self.pagination_function = pagination_function or self.paginate self.result_processor = result_processor + self.event_handler = event_handler or default_event_handler self.serializer: ResponseSerializer = serializer() def execute(self, context: Context) -> None: @@ -158,10 +171,7 @@ def execute_complete( if event: self.log.debug("%s completed with %s: %s", self.task_id, event.get("status"), event) - if event.get("status") == "failure": - raise AirflowException(event.get("message")) - - response = event.get("response") + response = self.event_handler(context, event) self.log.debug("response: %s", response) diff --git a/providers/src/airflow/providers/microsoft/azure/provider.yaml b/providers/src/airflow/providers/microsoft/azure/provider.yaml index cf0b3f75ef9d8..c4831a641b333 100644 --- a/providers/src/airflow/providers/microsoft/azure/provider.yaml +++ b/providers/src/airflow/providers/microsoft/azure/provider.yaml @@ -111,6 +111,8 @@ dependencies: # msgraph-core has transient import failures with microsoft-kiota-http==1.3.4 # See https://github.com/microsoftgraph/msgraph-sdk-python-core/issues/706 - microsoft-kiota-http>=1.3.0,!=1.3.4 + - microsoft-kiota-serialization-json==1.0.0 + - microsoft-kiota-serialization-text==1.0.0 devel-dependencies: - pywinrm diff --git a/providers/tests/microsoft/azure/hooks/test_msgraph.py b/providers/tests/microsoft/azure/hooks/test_msgraph.py index 0ecad98548b51..aff5d0226a1c4 100644 --- a/providers/tests/microsoft/azure/hooks/test_msgraph.py +++ b/providers/tests/microsoft/azure/hooks/test_msgraph.py @@ -19,11 +19,15 @@ import asyncio from json import JSONDecodeError from typing import TYPE_CHECKING -from unittest.mock import patch +from 
unittest.mock import Mock, patch import pytest +from httpx import Response from kiota_http.httpx_request_adapter import HttpxRequestAdapter +from kiota_serialization_json.json_parse_node import JsonParseNode +from kiota_serialization_text.text_parse_node import TextParseNode from msgraph_core import APIVersion, NationalClouds +from opentelemetry.trace import Span from airflow.exceptions import AirflowBadRequest, AirflowException, AirflowNotFoundException from airflow.providers.microsoft.azure.hooks.msgraph import ( @@ -175,6 +179,45 @@ def test_encoded_query_parameters(self): assert actual == {"%24expand": "reports,users,datasets,dataflows,dashboards", "%24top": 5000} + @pytest.mark.asyncio + async def test_throw_failed_responses_with_text_plain_content_type(self): + with patch( + "airflow.hooks.base.BaseHook.get_connection", + side_effect=get_airflow_connection, + ): + hook = KiotaRequestAdapterHook(conn_id="msgraph_api") + response = Mock(spec=Response) + response.headers = {"content-type": "text/plain"} + response.status_code = 429 + response.content = b"TenantThrottleThresholdExceeded" + response.is_success = False + span = Mock(spec=Span) + + actual = await hook.get_conn().get_root_parse_node(response, span, span) + + assert isinstance(actual, TextParseNode) + assert actual.get_str_value() == "TenantThrottleThresholdExceeded" + + @pytest.mark.asyncio + async def test_throw_failed_responses_with_application_json_content_type(self): + with patch( + "airflow.hooks.base.BaseHook.get_connection", + side_effect=get_airflow_connection, + ): + hook = KiotaRequestAdapterHook(conn_id="msgraph_api") + response = Mock(spec=Response) + response.headers = {"content-type": "application/json"} + response.status_code = 429 + response.content = b'{"error": {"code": "TenantThrottleThresholdExceeded"}}' + response.is_success = False + span = Mock(spec=Span) + + actual = await hook.get_conn().get_root_parse_node(response, span, span) + + assert isinstance(actual, JsonParseNode) + error_code = actual.get_child_node("error").get_child_node("code").get_str_value() + assert error_code == "TenantThrottleThresholdExceeded" + class TestResponseHandler: def test_default_response_handler_when_json(self): diff --git a/providers/tests/microsoft/azure/operators/test_msgraph.py b/providers/tests/microsoft/azure/operators/test_msgraph.py index 372152fe979ee..fe404e48e6f0a 100644 --- a/providers/tests/microsoft/azure/operators/test_msgraph.py +++ b/providers/tests/microsoft/azure/operators/test_msgraph.py @@ -19,6 +19,7 @@ import json import locale from base64 import b64encode +from typing import TYPE_CHECKING, Any import pytest @@ -35,6 +36,9 @@ mock_response, ) +if TYPE_CHECKING: + from airflow.utils.context import Context + class TestMSGraphAsyncOperator(Base): @pytest.mark.db_test @@ -101,6 +105,32 @@ def test_execute_when_an_exception_occurs(self): with pytest.raises(AirflowException): self.execute_operator(operator) + @pytest.mark.db_test + def test_execute_when_an_exception_occurs_on_custom_event_handler(self): + with self.patch_hook_and_request_adapter(AirflowException("An error occurred")): + + def custom_event_handler(context: Context, event: dict[Any, Any] | None = None): + if event: + if event.get("status") == "failure": + return None + + return event.get("response") + + operator = MSGraphAsyncOperator( + task_id="users_delta", + conn_id="msgraph_api", + url="users/delta", + event_handler=custom_event_handler, + ) + + results, events = self.execute_operator(operator) + + assert not results + assert 
len(events) == 1 + assert isinstance(events[0], TriggerEvent) + assert events[0].payload["status"] == "failure" + assert events[0].payload["message"] == "An error occurred" + @pytest.mark.db_test def test_execute_when_response_is_bytes(self): content = load_file("resources", "dummy.pdf", mode="rb", encoding=None) From f918f14854bc341eaf150127eeaa935728382ecc Mon Sep 17 00:00:00 2001 From: Gopal Dirisala <39794726+dirrao@users.noreply.github.com> Date: Tue, 15 Oct 2024 06:25:16 +0530 Subject: [PATCH 116/125] uv version bump to 0.4.20 (#42905) --- Dockerfile | 2 +- Dockerfile.ci | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 7b5fa8d0a2b44..c762cae900f2f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -50,7 +50,7 @@ ARG AIRFLOW_VERSION="2.10.2" ARG PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" ARG AIRFLOW_PIP_VERSION=24.2 -ARG AIRFLOW_UV_VERSION=0.4.17 +ARG AIRFLOW_UV_VERSION=0.4.20 ARG AIRFLOW_USE_UV="false" ARG UV_HTTP_TIMEOUT="300" ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow" diff --git a/Dockerfile.ci b/Dockerfile.ci index 7e0ee74556f9d..df1cdf8c27385 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -1278,7 +1278,7 @@ ARG DEFAULT_CONSTRAINTS_BRANCH="constraints-main" ARG AIRFLOW_CI_BUILD_EPOCH="10" ARG AIRFLOW_PRE_CACHED_PIP_PACKAGES="true" ARG AIRFLOW_PIP_VERSION=24.2 -ARG AIRFLOW_UV_VERSION=0.4.17 +ARG AIRFLOW_UV_VERSION=0.4.20 ARG AIRFLOW_USE_UV="true" # Setup PIP # By default PIP install run without cache to make image smaller From 272cd9ac6c1b437d282fd7dfc754ffa9d54a3e71 Mon Sep 17 00:00:00 2001 From: Gopal Dirisala <39794726+dirrao@users.noreply.github.com> Date: Tue, 15 Oct 2024 06:26:03 +0530 Subject: [PATCH 117/125] Removed deprecated Chainable type from BaseOperator (#42776) * Removed deprecated Chainable type from BaseOperator * Chainable word removed from spelling_wordlist.txt * Chainable word removed from spelling_wordlist.txt * Chainable word removed from spelling_wordlist.txt * news fragment added --- airflow/models/baseoperator.py | 5 ----- docs/spelling_wordlist.txt | 1 - newsfragments/42776.significant.rst | 1 + 3 files changed, 1 insertion(+), 6 deletions(-) create mode 100644 newsfragments/42776.significant.rst diff --git a/airflow/models/baseoperator.py b/airflow/models/baseoperator.py index 0c3d119be1983..514553e05a2dd 100644 --- a/airflow/models/baseoperator.py +++ b/airflow/models/baseoperator.py @@ -45,7 +45,6 @@ NoReturn, Sequence, TypeVar, - Union, cast, ) @@ -1789,10 +1788,6 @@ def expand_start_trigger_args(self, *, context: Context, session: Session) -> St return self.start_trigger_args -# TODO: Deprecate for Airflow 3.0 -Chainable = Union[DependencyMixin, Sequence[DependencyMixin]] - - def chain(*tasks: DependencyMixin | Sequence[DependencyMixin]) -> None: r""" Given a number of tasks, builds a dependency chain. diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 4413be1fc746a..78025bfe9fe22 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -227,7 +227,6 @@ cgi Cgroups cgroups cgroupspy -Chainable chakra Changelog changelog diff --git a/newsfragments/42776.significant.rst b/newsfragments/42776.significant.rst new file mode 100644 index 0000000000000..5fad7dfe79b66 --- /dev/null +++ b/newsfragments/42776.significant.rst @@ -0,0 +1 @@ +Removed deprecated ``Chainable`` type from ``BaseOperator``. 
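For downstream code that imported the alias removed above, the ``Union`` can simply be inlined; the ``chain()`` helper shown in the diff keeps accepting the same types. A minimal migration sketch, assuming Airflow 2.x import paths (the helper name ``wire_up`` is illustrative, not part of this patch):

from __future__ import annotations

from collections.abc import Sequence

from airflow.models.baseoperator import chain
from airflow.models.taskmixin import DependencyMixin  # module location may vary between Airflow versions


def wire_up(*tasks: DependencyMixin | Sequence[DependencyMixin]) -> None:
    # Same shape as the old `*tasks: Chainable` annotation, spelled out inline.
    chain(*tasks)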
From c9414840660f58996cd86351538e6ac77afd696b Mon Sep 17 00:00:00 2001 From: Kalyan R Date: Tue, 15 Oct 2024 07:17:31 +0530 Subject: [PATCH 118/125] pin in providers (#43001) --- generated/provider_dependencies.json | 6 +++--- providers/src/airflow/providers/apache/livy/provider.yaml | 2 +- providers/src/airflow/providers/dbt/cloud/provider.yaml | 2 +- providers/src/airflow/providers/http/provider.yaml | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json index 8efdd5eae78ad..49b457cd81a70 100644 --- a/generated/provider_dependencies.json +++ b/generated/provider_dependencies.json @@ -243,7 +243,7 @@ "aiohttp>=3.9.2", "apache-airflow-providers-http", "apache-airflow>=2.8.0", - "asgiref" + "asgiref>=2.3.0" ], "devel-deps": [], "plugins": [], @@ -472,7 +472,7 @@ "aiohttp>=3.9.2", "apache-airflow-providers-http", "apache-airflow>=2.8.0", - "asgiref" + "asgiref>=2.3.0" ], "devel-deps": [], "plugins": [], @@ -746,7 +746,7 @@ "deps": [ "aiohttp>=3.9.2", "apache-airflow>=2.8.0", - "asgiref", + "asgiref>=2.3.0", "requests>=2.27.0,<3", "requests_toolbelt" ], diff --git a/providers/src/airflow/providers/apache/livy/provider.yaml b/providers/src/airflow/providers/apache/livy/provider.yaml index 2e149246aee74..48c5524808dc6 100644 --- a/providers/src/airflow/providers/apache/livy/provider.yaml +++ b/providers/src/airflow/providers/apache/livy/provider.yaml @@ -58,7 +58,7 @@ dependencies: - apache-airflow>=2.8.0 - apache-airflow-providers-http - aiohttp>=3.9.2 - - asgiref + - asgiref>=2.3.0 integrations: - integration-name: Apache Livy diff --git a/providers/src/airflow/providers/dbt/cloud/provider.yaml b/providers/src/airflow/providers/dbt/cloud/provider.yaml index 828aecf335435..4caa2eeba0019 100644 --- a/providers/src/airflow/providers/dbt/cloud/provider.yaml +++ b/providers/src/airflow/providers/dbt/cloud/provider.yaml @@ -59,7 +59,7 @@ versions: dependencies: - apache-airflow>=2.8.0 - apache-airflow-providers-http - - asgiref + - asgiref>=2.3.0 - aiohttp>=3.9.2 additional-extras: diff --git a/providers/src/airflow/providers/http/provider.yaml b/providers/src/airflow/providers/http/provider.yaml index c329428987276..e7a9b9ef94f84 100644 --- a/providers/src/airflow/providers/http/provider.yaml +++ b/providers/src/airflow/providers/http/provider.yaml @@ -67,7 +67,7 @@ dependencies: - requests>=2.27.0,<3 - requests_toolbelt - aiohttp>=3.9.2 - - asgiref + - asgiref>=2.3.0 integrations: - integration-name: Hypertext Transfer Protocol (HTTP) From cd1e9f5a0488ec53b382f1f639d80a3eba8ae307 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Tue, 15 Oct 2024 04:43:07 +0200 Subject: [PATCH 119/125] Move tests_common from "dev" to top-level. (#42985) * Move tests common without changes * Fix docstrings in tests_common * Move tests_common from "dev" to top-level. Follow-up after #42505 fixing teething problem with tests_common. Originally in #42505 common test code was moved to "dev" folder, but the "dev" folder is really dedicated to "build" scripts and the problem with moving "tests_common" to the folder was that the whole "dev" folder is replaced (for non-committer PRs) with the content from the target branch. 
This is done for security reasons, because we can accidentally use any of the scripts from dev in the CI build scripts and we might not notice, which will open us to a security issue where a file in "dev" coming from PR could be accidentally executed during the "pull_request_target" workflow - which would expose our secrets and GitHub Package write permissions to a contributor coming from a fork. This change moves the files, fixes pre-commit specification and docs, also fixes a number of "doc" issues detected by "ruff" in the tests_common folder as they were detected after the move. The tests_common folder is added to folders mounted when breeze is executed with local folders mounted (in order to avoid accidental mounting of randomly generated files to inside the breeze container). All imports for the common tests were updated to reflect this move. --- .dockerignore | 1 + .pre-commit-config.yaml | 8 +-- Dockerfile.ci | 1 + contributing-docs/testing/system_tests.rst | 2 +- contributing-docs/testing/unit_tests.rst | 6 +-- .../utils/docker_command_utils.py | 1 + .../airflow_breeze/utils/selective_checks.py | 2 +- dev/breeze/tests/test_selective_checks.py | 4 +- dev/perf/scheduler_dag_execution_timing.py | 2 +- .../airflow_aux/test_pod_template_file.py | 1 + .../cloud/log/test_oss_task_handler.py | 5 +- .../aws/auth_manager/avp/test_facade.py | 3 +- .../aws/auth_manager/cli/test_avp_commands.py | 5 +- .../test_aws_security_manager_override.py | 3 +- .../aws/auth_manager/test_aws_auth_manager.py | 7 ++- .../aws/auth_manager/views/test_auth.py | 5 +- .../executors/batch/test_batch_executor.py | 5 +- .../aws/executors/ecs/test_ecs_executor.py | 7 ++- .../tests/amazon/aws/hooks/test_base_aws.py | 3 +- providers/tests/amazon/aws/hooks/test_s3.py | 3 +- .../tests/amazon/aws/links/test_base_aws.py | 5 +- .../aws/log/test_cloudwatch_task_handler.py | 3 +- .../amazon/aws/log/test_s3_task_handler.py | 3 +- .../aws/secrets/test_systems_manager.py | 3 +- .../aws/transfers/test_redshift_to_s3.py | 3 +- .../aws/transfers/test_s3_to_redshift.py | 3 +- .../amazon/aws/transfers/test_s3_to_sftp.py | 3 +- .../amazon/aws/transfers/test_sftp_to_s3.py | 3 +- .../tests/apache/hive/hooks/test_hive.py | 2 +- .../tests/apache/livy/hooks/test_livy.py | 3 +- .../apache/spark/hooks/test_spark_sql.py | 3 +- .../tests/atlassian/jira/hooks/test_jira.py | 3 +- .../atlassian/jira/operators/test_jira.py | 3 +- .../tests/atlassian/jira/sensors/test_jira.py | 3 +- .../tests/celery/cli/test_celery_command.py | 5 +- .../celery/executors/test_celery_executor.py | 7 ++- .../celery/log_handlers/test_log_handlers.py | 5 +- .../kubernetes/cli/test_kubernetes_command.py | 3 +- .../executors/test_kubernetes_executor.py | 5 +- .../cncf/kubernetes/hooks/test_kubernetes.py | 5 +- .../log_handlers/test_log_handlers.py | 5 +- .../cncf/kubernetes/operators/test_pod.py | 3 +- .../operators/test_spark_kubernetes.py | 3 +- .../tests/cncf/kubernetes/test_client.py | 3 +- .../kubernetes/test_template_rendering.py | 3 +- .../common/io/operators/test_file_transfer.py | 4 +- .../tests/common/io/xcom/test_backend.py | 11 ++-- .../tests/common/sql/hooks/test_dbapi.py | 3 +- providers/tests/common/sql/hooks/test_sql.py | 2 +- .../tests/common/sql/hooks/test_sqlparse.py | 3 +- .../tests/common/sql/operators/test_sql.py | 3 +- .../common/sql/operators/test_sql_execute.py | 3 +- .../tests/common/sql/sensors/test_sql.py | 3 +- providers/tests/common/sql/test_utils.py | 3 +- providers/tests/conftest.py | 2 +- .../plugins/test_databricks_workflow.py | 3 +- 
.../api_endpoints/test_rpc_api_endpoint.py | 5 +- providers/tests/edge/cli/test_edge_command.py | 3 +- .../edge/plugins/test_edge_executor_plugin.py | 3 +- .../elasticsearch/log/test_es_task_handler.py | 4 +- .../api/auth/backend/test_basic_auth.py | 3 +- .../api/auth/backend/test_kerberos_auth.py | 2 +- .../api/auth/backend/test_session.py | 3 +- .../api_endpoints/api_connexion_utils.py | 2 +- .../api_endpoints/test_asset_endpoint.py | 6 +-- .../auth_manager/api_endpoints/test_auth.py | 11 ++-- .../api_endpoints/test_backfill_endpoint.py | 14 ++--- .../auth_manager/api_endpoints/test_cors.py | 7 ++- .../api_endpoints/test_dag_endpoint.py | 6 +-- .../api_endpoints/test_dag_run_endpoint.py | 4 +- .../api_endpoints/test_dag_source_endpoint.py | 4 +- .../test_dag_warning_endpoint.py | 4 +- .../api_endpoints/test_event_log_endpoint.py | 4 +- .../test_import_error_endpoint.py | 6 +-- .../test_role_and_permission_endpoint.py | 4 +- .../test_task_instance_endpoint.py | 4 +- .../api_endpoints/test_user_endpoint.py | 6 +-- .../api_endpoints/test_variable_endpoint.py | 4 +- .../api_endpoints/test_xcom_endpoint.py | 4 +- .../cli_commands/test_definition.py | 2 +- .../cli_commands/test_role_command.py | 5 +- .../cli_commands/test_sync_perm_command.py | 3 +- .../cli_commands/test_user_command.py | 3 +- .../auth_manager/cli_commands/test_utils.py | 3 +- providers/tests/fab/auth_manager/conftest.py | 5 +- .../fab/auth_manager/decorators/test_auth.py | 3 +- .../models/test_anonymous_user.py | 2 +- .../auth_manager/schemas/test_user_schema.py | 2 +- .../security_manager/test_constants.py | 2 +- .../security_manager/test_override.py | 2 +- .../fab/auth_manager/test_fab_auth_manager.py | 2 +- .../tests/fab/auth_manager/test_models.py | 3 +- .../tests/fab/auth_manager/test_security.py | 12 ++--- .../auth_manager/views/test_permissions.py | 4 +- .../fab/auth_manager/views/test_roles_list.py | 4 +- .../tests/fab/auth_manager/views/test_user.py | 4 +- .../fab/auth_manager/views/test_user_edit.py | 4 +- .../fab/auth_manager/views/test_user_stats.py | 4 +- .../cloud/hooks/test_bigquery_system.py | 2 +- .../google/cloud/hooks/test_kms_system.py | 2 +- .../cloud/hooks/test_secret_manager_system.py | 2 +- .../google/cloud/log/test_gcs_task_handler.py | 5 +- .../cloud/log/test_gcs_task_handler_system.py | 14 ++--- .../log/test_stackdriver_task_handler.py | 7 ++- .../test_stackdriver_task_handler_system.py | 14 ++--- .../google/cloud/operators/test_bigquery.py | 13 +++-- .../cloud/operators/test_bigquery_dts.py | 3 +- .../cloud/operators/test_dataprep_system.py | 7 ++- .../google/cloud/operators/test_dataproc.py | 5 +- .../cloud/operators/test_datastore_system.py | 4 +- .../google/cloud/operators/test_looker.py | 3 +- .../secrets/test_secret_manager_system.py | 2 +- .../test_facebook_ads_to_gcs_system.py | 10 ++-- .../test_salesforce_to_gcs_system.py | 12 ++--- .../google/cloud/utils/gcp_authenticator.py | 6 +-- .../common/auth_backend/test_google_openid.py | 9 ++-- .../operators/test_display_video_system.py | 12 ++--- providers/tests/hashicorp/hooks/test_vault.py | 3 +- providers/tests/imap/hooks/test_imap.py | 3 +- .../azure/log/test_wasb_task_handler.py | 5 +- providers/tests/mongo/hooks/test_mongo.py | 3 +- providers/tests/mysql/hooks/test_mysql.py | 4 +- .../tests/openlineage/extractors/test_bash.py | 3 +- .../openlineage/extractors/test_manager.py | 3 +- .../openlineage/extractors/test_python.py | 3 +- .../tests/openlineage/plugins/test_adapter.py | 5 +- .../openlineage/plugins/test_execution.py | 5 +- 
.../openlineage/plugins/test_listener.py | 5 +- .../openlineage/plugins/test_openlineage.py | 5 +- .../tests/openlineage/plugins/test_utils.py | 3 +- providers/tests/openlineage/test_conf.py | 3 +- .../tests/openlineage/utils/test_utils.py | 8 +-- .../opensearch/log/test_os_task_handler.py | 4 +- .../redis/log/test_redis_task_handler.py | 3 +- providers/tests/sftp/operators/test_sftp.py | 3 +- providers/tests/smtp/hooks/test_smtp.py | 3 +- .../tests/smtp/notifications/test_smtp.py | 5 +- providers/tests/ssh/operators/test_ssh.py | 3 +- .../tests/standard/operators/test_bash.py | 3 +- .../tests/standard/operators/test_datetime.py | 3 +- .../tests/standard/operators/test_weekday.py | 3 +- .../tests/standard/sensors/test_weekday.py | 3 +- .../airbyte/example_airbyte_trigger_job.py | 2 +- .../system/alibaba/example_adb_spark_batch.py | 4 +- .../system/alibaba/example_adb_spark_sql.py | 4 +- .../system/alibaba/example_oss_bucket.py | 4 +- .../system/alibaba/example_oss_object.py | 4 +- providers/tests/system/amazon/CONTRIBUTING.md | 2 +- .../system/amazon/aws/example_appflow.py | 4 +- .../system/amazon/aws/example_appflow_run.py | 4 +- .../tests/system/amazon/aws/example_athena.py | 4 +- .../amazon/aws/example_azure_blob_to_s3.py | 4 +- .../tests/system/amazon/aws/example_batch.py | 4 +- .../system/amazon/aws/example_bedrock.py | 4 +- .../example_bedrock_retrieve_and_generate.py | 4 +- .../amazon/aws/example_cloudformation.py | 4 +- .../system/amazon/aws/example_comprehend.py | 4 +- .../example_comprehend_document_classifier.py | 4 +- .../system/amazon/aws/example_datasync.py | 4 +- .../tests/system/amazon/aws/example_dms.py | 4 +- .../system/amazon/aws/example_dynamodb.py | 4 +- .../amazon/aws/example_dynamodb_to_s3.py | 4 +- .../tests/system/amazon/aws/example_ec2.py | 4 +- .../tests/system/amazon/aws/example_ecs.py | 4 +- .../system/amazon/aws/example_ecs_fargate.py | 4 +- .../amazon/aws/example_eks_templated.py | 4 +- .../example_eks_with_fargate_in_one_step.py | 4 +- .../aws/example_eks_with_fargate_profile.py | 4 +- .../example_eks_with_nodegroup_in_one_step.py | 4 +- .../amazon/aws/example_eks_with_nodegroups.py | 4 +- .../tests/system/amazon/aws/example_emr.py | 4 +- .../system/amazon/aws/example_emr_eks.py | 4 +- .../aws/example_emr_notebook_execution.py | 4 +- .../amazon/aws/example_emr_serverless.py | 4 +- .../system/amazon/aws/example_eventbridge.py | 2 +- .../system/amazon/aws/example_ftp_to_s3.py | 4 +- .../system/amazon/aws/example_gcs_to_s3.py | 4 +- .../amazon/aws/example_glacier_to_gcs.py | 4 +- .../tests/system/amazon/aws/example_glue.py | 4 +- .../amazon/aws/example_glue_data_quality.py | 4 +- ...e_glue_data_quality_with_recommendation.py | 4 +- .../amazon/aws/example_glue_databrew.py | 4 +- .../aws/example_google_api_sheets_to_s3.py | 4 +- .../aws/example_google_api_youtube_to_s3.py | 4 +- .../amazon/aws/example_hive_to_dynamodb.py | 4 +- .../system/amazon/aws/example_http_to_s3.py | 4 +- .../aws/example_imap_attachment_to_s3.py | 4 +- .../amazon/aws/example_kinesis_analytics.py | 4 +- .../tests/system/amazon/aws/example_lambda.py | 4 +- .../system/amazon/aws/example_local_to_s3.py | 4 +- .../system/amazon/aws/example_mongo_to_s3.py | 4 +- .../system/amazon/aws/example_neptune.py | 4 +- .../system/amazon/aws/example_quicksight.py | 4 +- .../system/amazon/aws/example_rds_event.py | 4 +- .../system/amazon/aws/example_rds_export.py | 4 +- .../system/amazon/aws/example_rds_instance.py | 4 +- .../system/amazon/aws/example_rds_snapshot.py | 4 +- 
.../system/amazon/aws/example_redshift.py | 4 +- .../aws/example_redshift_s3_transfers.py | 4 +- .../tests/system/amazon/aws/example_s3.py | 4 +- .../amazon/aws/example_s3_to_dynamodb.py | 4 +- .../system/amazon/aws/example_s3_to_ftp.py | 4 +- .../system/amazon/aws/example_s3_to_sftp.py | 4 +- .../system/amazon/aws/example_s3_to_sql.py | 5 +- .../system/amazon/aws/example_sagemaker.py | 4 +- .../amazon/aws/example_sagemaker_endpoint.py | 4 +- .../amazon/aws/example_sagemaker_notebook.py | 4 +- .../amazon/aws/example_sagemaker_pipeline.py | 4 +- .../amazon/aws/example_salesforce_to_s3.py | 4 +- .../system/amazon/aws/example_sftp_to_s3.py | 4 +- .../tests/system/amazon/aws/example_sns.py | 4 +- .../system/amazon/aws/example_sql_to_s3.py | 4 +- .../tests/system/amazon/aws/example_sqs.py | 4 +- .../amazon/aws/example_step_functions.py | 4 +- .../amazon/aws/tests/test_aws_auth_manager.py | 4 +- .../tests/system/apache/beam/example_beam.py | 2 +- .../apache/beam/example_beam_java_flink.py | 2 +- .../apache/beam/example_beam_java_spark.py | 2 +- .../tests/system/apache/beam/example_go.py | 2 +- .../system/apache/beam/example_go_dataflow.py | 2 +- .../apache/beam/example_java_dataflow.py | 2 +- .../system/apache/beam/example_python.py | 2 +- .../apache/beam/example_python_async.py | 2 +- .../apache/beam/example_python_dataflow.py | 2 +- .../apache/cassandra/example_cassandra_dag.py | 2 +- .../system/apache/drill/example_drill_dag.py | 2 +- .../system/apache/druid/example_druid_dag.py | 2 +- .../system/apache/hive/example_twitter_dag.py | 4 +- .../system/apache/iceberg/example_iceberg.py | 2 +- .../kafka/example_dag_event_listener.py | 2 +- .../apache/kafka/example_dag_hello_kafka.py | 2 +- .../system/apache/kylin/example_kylin_dag.py | 4 +- .../tests/system/apache/livy/example_livy.py | 4 +- .../tests/system/apache/pig/example_pig.py | 2 +- .../system/apache/pinot/example_pinot_dag.py | 2 +- .../system/apache/spark/example_pyspark.py | 2 +- .../system/apache/spark/example_spark_dag.py | 2 +- providers/tests/system/asana/example_asana.py | 4 +- .../cncf/kubernetes/example_kubernetes.py | 4 +- .../kubernetes/example_kubernetes_async.py | 4 +- .../example_kubernetes_decorator.py | 2 +- .../cncf/kubernetes/example_kubernetes_job.py | 4 +- .../kubernetes/example_kubernetes_resource.py | 4 +- .../kubernetes/example_spark_kubernetes.py | 4 +- .../example_cohere_embedding_operator.py | 2 +- .../io/example_file_transfer_local_to_s3.py | 4 +- .../sql/example_sql_column_table_check.py | 2 +- .../common/sql/example_sql_execute_query.py | 2 +- .../system/databricks/example_databricks.py | 4 +- .../databricks/example_databricks_repos.py | 4 +- .../databricks/example_databricks_sensors.py | 4 +- .../databricks/example_databricks_sql.py | 4 +- .../databricks/example_databricks_workflow.py | 4 +- .../system/dbt/cloud/example_dbt_cloud.py | 8 +-- .../tests/system/dingding/example_dingding.py | 4 +- .../tests/system/docker/example_docker.py | 2 +- .../system/docker/example_docker_copy_data.py | 2 +- .../system/docker/example_docker_swarm.py | 2 +- .../example_taskflow_api_docker_virtualenv.py | 8 ++- .../example_elasticsearch_query.py | 2 +- providers/tests/system/ftp/example_ftp.py | 4 +- .../tests/system/github/example_github.py | 2 +- .../tests/system/google/ads/example_ads.py | 4 +- .../cloud/automl/example_automl_dataset.py | 4 +- .../automl/example_automl_translation.py | 4 +- .../example_automl_video_classification.py | 4 +- .../automl/example_automl_video_tracking.py | 4 +- 
.../example_automl_vision_classification.py | 4 +- .../example_automl_vision_object_detection.py | 4 +- .../cloud/azure/example_azure_blob_to_gcs.py | 4 +- .../azure/example_azure_fileshare_to_gcs.py | 4 +- .../bigquery/example_bigquery_dataset.py | 4 +- .../cloud/bigquery/example_bigquery_dts.py | 4 +- .../bigquery/example_bigquery_operations.py | 4 +- .../example_bigquery_operations_location.py | 4 +- .../bigquery/example_bigquery_queries.py | 4 +- .../example_bigquery_queries_async.py | 4 +- .../bigquery/example_bigquery_sensors.py | 4 +- .../cloud/bigquery/example_bigquery_tables.py | 4 +- .../bigquery/example_bigquery_to_bigquery.py | 4 +- .../cloud/bigquery/example_bigquery_to_gcs.py | 4 +- .../bigquery/example_bigquery_to_gcs_async.py | 4 +- .../bigquery/example_bigquery_to_mssql.py | 4 +- .../bigquery/example_bigquery_to_mysql.py | 4 +- .../bigquery/example_bigquery_to_postgres.py | 4 +- .../bigquery/example_bigquery_transfer.py | 4 +- .../bigquery/example_bigquery_value_check.py | 6 +-- .../google/cloud/bigtable/example_bigtable.py | 4 +- .../cloud/cloud_batch/example_cloud_batch.py | 4 +- .../cloud/cloud_build/example_cloud_build.py | 4 +- .../example_cloud_build_trigger.py | 4 +- .../cloud_functions/example_functions.py | 4 +- .../example_cloud_memorystore_memcached.py | 4 +- .../example_cloud_memorystore_redis.py | 4 +- .../cloud/cloud_run/example_cloud_run.py | 4 +- .../cloud_run/example_cloud_run_service.py | 4 +- .../cloud/cloud_sql/example_cloud_sql.py | 4 +- .../cloud_sql/example_cloud_sql_query.py | 4 +- .../cloud_sql/example_cloud_sql_query_ssl.py | 4 +- .../cloud/composer/example_cloud_composer.py | 4 +- .../google/cloud/compute/example_compute.py | 4 +- .../cloud/compute/example_compute_igm.py | 4 +- .../cloud/compute/example_compute_ssh.py | 4 +- .../compute/example_compute_ssh_os_login.py | 4 +- .../compute/example_compute_ssh_parallel.py | 4 +- .../example_dlp_deidentify_content.py | 4 +- .../example_dlp_info_types.py | 4 +- .../example_dlp_inspect_template.py | 4 +- .../data_loss_prevention/example_dlp_job.py | 4 +- .../example_dlp_job_trigger.py | 4 +- .../cloud/dataflow/example_dataflow_go.py | 2 +- .../dataflow/example_dataflow_native_java.py | 4 +- .../example_dataflow_native_python.py | 4 +- .../example_dataflow_native_python_async.py | 4 +- .../dataflow/example_dataflow_pipeline.py | 4 +- .../example_dataflow_sensors_deferrable.py | 4 +- .../cloud/dataflow/example_dataflow_sql.py | 4 +- .../example_dataflow_streaming_python.py | 4 +- .../dataflow/example_dataflow_template.py | 4 +- .../cloud/dataflow/example_dataflow_yaml.py | 4 +- .../google/cloud/dataform/example_dataform.py | 4 +- .../cloud/datafusion/example_datafusion.py | 4 +- .../datapipelines/example_datapipeline.py | 4 +- .../google/cloud/dataplex/example_dataplex.py | 4 +- .../cloud/dataplex/example_dataplex_dp.py | 4 +- .../cloud/dataplex/example_dataplex_dq.py | 4 +- .../google/cloud/dataprep/example_dataprep.py | 4 +- .../cloud/dataproc/example_dataproc_batch.py | 4 +- .../example_dataproc_batch_deferrable.py | 4 +- .../example_dataproc_batch_persistent.py | 4 +- ...cluster_create_existing_stopped_cluster.py | 4 +- .../example_dataproc_cluster_deferrable.py | 4 +- .../example_dataproc_cluster_diagnose.py | 4 +- .../example_dataproc_cluster_generator.py | 4 +- .../example_dataproc_cluster_start_stop.py | 4 +- .../example_dataproc_cluster_update.py | 4 +- .../cloud/dataproc/example_dataproc_flink.py | 4 +- .../cloud/dataproc/example_dataproc_gke.py | 4 +- 
.../cloud/dataproc/example_dataproc_hadoop.py | 4 +- .../cloud/dataproc/example_dataproc_hive.py | 4 +- .../cloud/dataproc/example_dataproc_pig.py | 4 +- .../cloud/dataproc/example_dataproc_presto.py | 4 +- .../dataproc/example_dataproc_pyspark.py | 4 +- .../cloud/dataproc/example_dataproc_spark.py | 4 +- .../dataproc/example_dataproc_spark_async.py | 4 +- .../example_dataproc_spark_deferrable.py | 4 +- .../dataproc/example_dataproc_spark_sql.py | 4 +- .../cloud/dataproc/example_dataproc_sparkr.py | 4 +- .../cloud/dataproc/example_dataproc_trino.py | 4 +- .../dataproc/example_dataproc_workflow.py | 4 +- .../example_dataproc_workflow_deferrable.py | 4 +- .../example_dataproc_metastore.py | 4 +- .../example_dataproc_metastore_backup.py | 4 +- ...ataproc_metastore_hive_partition_sensor.py | 4 +- .../datastore/example_datastore_commit.py | 4 +- .../datastore/example_datastore_query.py | 4 +- .../datastore/example_datastore_rollback.py | 4 +- .../cloud/gcs/example_calendar_to_gcs.py | 4 +- .../google/cloud/gcs/example_firestore.py | 4 +- .../google/cloud/gcs/example_gcs_acl.py | 4 +- .../cloud/gcs/example_gcs_copy_delete.py | 4 +- .../google/cloud/gcs/example_gcs_sensor.py | 4 +- .../cloud/gcs/example_gcs_to_bigquery.py | 4 +- .../gcs/example_gcs_to_bigquery_async.py | 4 +- .../google/cloud/gcs/example_gcs_to_gcs.py | 4 +- .../google/cloud/gcs/example_gcs_to_gdrive.py | 4 +- .../google/cloud/gcs/example_gcs_to_sheets.py | 4 +- .../google/cloud/gcs/example_gcs_transform.py | 4 +- .../gcs/example_gcs_transform_timespan.py | 4 +- .../cloud/gcs/example_gcs_upload_download.py | 4 +- .../google/cloud/gcs/example_gdrive_to_gcs.py | 4 +- .../google/cloud/gcs/example_mssql_to_gcs.py | 4 +- .../google/cloud/gcs/example_mysql_to_gcs.py | 4 +- .../google/cloud/gcs/example_oracle_to_gcs.py | 4 +- .../google/cloud/gcs/example_s3_to_gcs.py | 4 +- .../google/cloud/gcs/example_sftp_to_gcs.py | 4 +- .../system/google/cloud/gcs/example_sheets.py | 4 +- .../google/cloud/gcs/example_sheets_to_gcs.py | 4 +- .../google/cloud/gcs/example_trino_to_gcs.py | 4 +- .../example_kubernetes_engine.py | 4 +- .../example_kubernetes_engine_async.py | 4 +- .../example_kubernetes_engine_job.py | 4 +- .../example_kubernetes_engine_kueue.py | 4 +- .../example_kubernetes_engine_resource.py | 4 +- .../life_sciences/example_life_sciences.py | 4 +- .../cloud/ml_engine/example_mlengine.py | 4 +- .../example_natural_language.py | 4 +- .../google/cloud/pubsub/example_pubsub.py | 4 +- .../cloud/pubsub/example_pubsub_deferrable.py | 4 +- .../google/cloud/spanner/example_spanner.py | 4 +- .../speech_to_text/example_speech_to_text.py | 4 +- .../sql_to_sheets/example_sql_to_sheets.py | 4 +- .../cloud/stackdriver/example_stackdriver.py | 4 +- ...mple_cloud_storage_transfer_service_aws.py | 4 +- ...mple_cloud_storage_transfer_service_gcp.py | 4 +- ...oud_storage_transfer_service_gcs_to_gcs.py | 4 +- .../google/cloud/tasks/example_queue.py | 4 +- .../google/cloud/tasks/example_tasks.py | 4 +- .../text_to_speech/example_text_to_speech.py | 4 +- .../cloud/transfers/example_gcs_to_sftp.py | 4 +- .../transfers/example_gdrive_to_local.py | 4 +- .../transfers/example_postgres_to_gcs.py | 4 +- .../cloud/translate/example_translate.py | 4 +- .../example_translate_speech.py | 4 +- ..._vertex_ai_auto_ml_forecasting_training.py | 4 +- ...xample_vertex_ai_auto_ml_image_training.py | 4 +- ...example_vertex_ai_auto_ml_list_training.py | 4 +- ...mple_vertex_ai_auto_ml_tabular_training.py | 4 +- ...xample_vertex_ai_auto_ml_video_training.py | 4 +- 
.../example_vertex_ai_batch_prediction_job.py | 4 +- .../example_vertex_ai_custom_container.py | 4 +- .../vertex_ai/example_vertex_ai_custom_job.py | 4 +- ...ple_vertex_ai_custom_job_python_package.py | 4 +- .../vertex_ai/example_vertex_ai_dataset.py | 4 +- .../vertex_ai/example_vertex_ai_endpoint.py | 4 +- .../example_vertex_ai_generative_model.py | 4 +- ...ample_vertex_ai_generative_model_tuning.py | 4 +- ...ple_vertex_ai_hyperparameter_tuning_job.py | 4 +- .../example_vertex_ai_list_custom_jobs.py | 4 +- .../example_vertex_ai_model_service.py | 4 +- .../example_vertex_ai_pipeline_job.py | 4 +- .../example_video_intelligence.py | 4 +- .../vision/example_vision_annotate_image.py | 4 +- .../vision/example_vision_autogenerated.py | 4 +- .../cloud/vision/example_vision_explicit.py | 4 +- .../cloud/workflows/example_workflows.py | 4 +- .../example_datacatalog_entries.py | 4 +- .../example_datacatalog_search_catalog.py | 4 +- .../example_datacatalog_tag_templates.py | 4 +- .../datacatalog/example_datacatalog_tags.py | 4 +- .../system/google/leveldb/example_leveldb.py | 4 +- .../example_analytics_admin.py | 4 +- .../example_campaign_manager.py | 4 +- .../marketing_platform/example_search_ads.py | 2 +- .../google/suite/example_local_to_drive.py | 4 +- providers/tests/system/http/example_http.py | 2 +- .../tests/system/influxdb/example_influxdb.py | 4 +- .../system/influxdb/example_influxdb_query.py | 2 +- .../tests/system/jdbc/example_jdbc_queries.py | 4 +- .../jenkins/example_jenkins_job_trigger.py | 2 +- .../azure/example_adf_run_pipeline.py | 4 +- .../microsoft/azure/example_adls_create.py | 4 +- .../microsoft/azure/example_adls_delete.py | 4 +- .../microsoft/azure/example_adls_list.py | 4 +- .../azure/example_azure_batch_operator.py | 2 +- .../example_azure_container_instances.py | 2 +- .../microsoft/azure/example_azure_cosmosdb.py | 4 +- .../azure/example_azure_service_bus.py | 4 +- .../microsoft/azure/example_azure_synapse.py | 2 +- .../microsoft/azure/example_fileshare.py | 4 +- .../microsoft/azure/example_local_to_adls.py | 4 +- .../microsoft/azure/example_local_to_wasb.py | 4 +- .../microsoft/azure/example_msfabric.py | 4 +- .../system/microsoft/azure/example_msgraph.py | 4 +- .../system/microsoft/azure/example_powerbi.py | 4 +- .../azure/example_powerbi_dataset_refresh.py | 4 +- .../microsoft/azure/example_s3_to_wasb.py | 4 +- .../microsoft/azure/example_sftp_to_wasb.py | 4 +- .../azure/example_synapse_run_pipeline.py | 4 +- .../microsoft/azure/example_wasb_sensors.py | 2 +- .../system/microsoft/mssql/example_mssql.py | 4 +- .../system/microsoft/winrm/example_winrm.py | 4 +- providers/tests/system/mysql/example_mysql.py | 2 +- providers/tests/system/neo4j/example_neo4j.py | 2 +- .../tests/system/openai/example_openai.py | 2 +- .../openai/example_trigger_batch_operator.py | 2 +- .../system/opensearch/example_opensearch.py | 4 +- .../system/opsgenie/example_opsgenie_alert.py | 2 +- .../opsgenie/example_opsgenie_notifier.py | 2 +- .../system/papermill/example_papermill.py | 2 +- .../example_papermill_remote_verify.py | 2 +- .../papermill/example_papermill_verify.py | 2 +- .../tests/system/pgvector/example_pgvector.py | 2 +- .../pgvector/example_pgvector_openai.py | 2 +- .../pinecone/example_create_pod_index.py | 2 +- .../example_create_serverless_index.py | 2 +- .../system/pinecone/example_dag_pinecone.py | 2 +- .../pinecone/example_pinecone_cohere.py | 2 +- .../pinecone/example_pinecone_openai.py | 2 +- .../tests/system/postgres/example_postgres.py | 4 +- 
.../system/presto/example_gcs_to_presto.py | 2 +- .../tests/system/qdrant/example_dag_qdrant.py | 2 +- .../system/redis/example_redis_publish.py | 4 +- .../tests/system/salesforce/example_bulk.py | 2 +- .../example_salesforce_apex_rest.py | 2 +- .../system/samba/example_gcs_to_samba.py | 4 +- .../tests/system/sftp/example_sftp_sensor.py | 4 +- .../system/singularity/example_singularity.py | 2 +- providers/tests/system/slack/example_slack.py | 2 +- .../system/slack/example_slack_webhook.py | 2 +- .../system/slack/example_sql_to_slack.py | 2 +- .../slack/example_sql_to_slack_webhook.py | 2 +- .../snowflake/example_copy_into_snowflake.py | 2 +- .../system/snowflake/example_snowflake.py | 2 +- .../snowflake/example_snowpark_decorator.py | 2 +- .../snowflake/example_snowpark_operator.py | 2 +- .../tests/system/sqlite/example_sqlite.py | 4 +- .../tests/system/tableau/example_tableau.py | 2 +- .../tests/system/telegram/example_telegram.py | 2 +- ...example_azure_blob_to_teradata_transfer.py | 4 +- .../example_s3_to_teradata_transfer.py | 4 +- .../system/teradata/example_ssl_teradata.py | 4 +- .../tests/system/teradata/example_teradata.py | 4 +- .../teradata/example_teradata_call_sp.py | 4 +- .../example_teradata_compute_cluster.py | 4 +- .../example_teradata_to_teradata_transfer.py | 4 +- .../system/trino/example_gcs_to_trino.py | 2 +- providers/tests/system/trino/example_trino.py | 2 +- .../weaviate/example_weaviate_cohere.py | 2 +- .../example_weaviate_dynamic_mapping_dag.py | 2 +- .../weaviate/example_weaviate_openai.py | 2 +- .../weaviate/example_weaviate_operator.py | 2 +- .../weaviate/example_weaviate_using_hook.py | 2 +- .../example_weaviate_vectorizer_dag.py | 2 +- ...example_weaviate_without_vectorizer_dag.py | 2 +- .../system/yandex/example_yandexcloud.py | 7 ++- .../yandex/example_yandexcloud_dataproc.py | 8 +-- ...xample_yandexcloud_dataproc_lightweight.py | 8 +-- .../system/yandex/example_yandexcloud_yq.py | 8 +-- providers/tests/system/ydb/example_ydb.py | 4 +- .../zendesk/example_zendesk_custom_get.py | 2 +- providers/tests/yandex/hooks/test_yandex.py | 4 +- providers/tests/yandex/links/test_yq.py | 5 +- providers/tests/yandex/operators/test_yq.py | 3 +- pyproject.toml | 6 +-- scripts/ci/docker-compose/local.yml | 3 ++ scripts/ci/pre_commit/check_system_tests.py | 8 +-- scripts/in_container/install_devel_deps.py | 2 +- task_sdk/tests/conftest.py | 2 +- tests/always/test_connection.py | 3 +- tests/always/test_example_dags.py | 3 +- tests/always/test_secrets.py | 5 +- tests/always/test_secrets_backends.py | 3 +- tests/always/test_secrets_local_filesystem.py | 3 +- tests/api_connexion/conftest.py | 7 ++- .../endpoints/test_asset_endpoint.py | 11 ++-- .../endpoints/test_backfill_endpoint.py | 5 +- .../endpoints/test_config_endpoint.py | 4 +- .../endpoints/test_connection_endpoint.py | 9 ++-- .../endpoints/test_dag_endpoint.py | 9 ++-- .../endpoints/test_dag_parsing.py | 5 +- .../endpoints/test_dag_run_endpoint.py | 11 ++-- .../endpoints/test_dag_source_endpoint.py | 5 +- .../endpoints/test_dag_stats_endpoint.py | 5 +- .../endpoints/test_dag_warning_endpoint.py | 5 +- .../endpoints/test_event_log_endpoint.py | 7 ++- .../endpoints/test_extra_link_endpoint.py | 11 ++-- .../endpoints/test_import_error_endpoint.py | 9 ++-- .../endpoints/test_log_endpoint.py | 5 +- .../test_mapped_task_instance_endpoint.py | 7 ++- .../endpoints/test_plugin_endpoint.py | 9 ++-- .../endpoints/test_pool_endpoint.py | 9 ++-- .../endpoints/test_provider_endpoint.py | 3 +- .../endpoints/test_task_endpoint.py | 5 
+- .../endpoints/test_task_instance_endpoint.py | 7 ++- .../endpoints/test_variable_endpoint.py | 9 ++-- .../endpoints/test_xcom_endpoint.py | 7 ++- .../schemas/test_connection_schema.py | 3 +- .../schemas/test_dag_run_schema.py | 5 +- .../schemas/test_dataset_schema.py | 3 +- .../schemas/test_error_schema.py | 5 +- .../schemas/test_plugin_schema.py | 3 +- .../schemas/test_pool_schemas.py | 3 +- .../api_connexion/schemas/test_xcom_schema.py | 3 +- tests/api_connexion/test_auth.py | 7 ++- tests/api_connexion/test_parameters.py | 3 +- tests/api_connexion/test_security.py | 2 +- .../views/public/test_connections.py | 3 +- .../api_fastapi/views/public/test_dag_run.py | 3 +- tests/api_fastapi/views/public/test_dags.py | 3 +- .../views/public/test_variables.py | 3 +- tests/api_fastapi/views/ui/test_assets.py | 3 +- tests/api_fastapi/views/ui/test_dashboard.py | 3 +- .../endpoints/test_rpc_api_endpoint.py | 5 +- tests/api_internal/test_internal_api_call.py | 3 +- tests/assets/test_manager.py | 3 +- tests/assets/tests_asset.py | 2 +- tests/auth/managers/simple/views/test_auth.py | 3 +- tests/cli/commands/test_backfill_command.py | 3 +- tests/cli/commands/test_celery_command.py | 3 +- tests/cli/commands/test_config_command.py | 3 +- tests/cli/commands/test_connection_command.py | 3 +- tests/cli/commands/test_dag_command.py | 6 +-- .../commands/test_dag_processor_command.py | 3 +- .../cli/commands/test_fastapi_api_command.py | 1 + tests/cli/commands/test_info_command.py | 3 +- .../cli/commands/test_internal_api_command.py | 4 +- tests/cli/commands/test_jobs_command.py | 3 +- tests/cli/commands/test_kerberos_command.py | 3 +- tests/cli/commands/test_kubernetes_command.py | 3 +- tests/cli/commands/test_plugins_command.py | 16 +++--- .../test_rotate_fernet_key_command.py | 5 +- tests/cli/commands/test_scheduler_command.py | 3 +- tests/cli/commands/test_task_command.py | 7 ++- tests/cli/commands/test_variable_command.py | 3 +- tests/cli/commands/test_webserver_command.py | 4 +- tests/cli/conftest.py | 3 +- tests/cli/test_cli_parser.py | 3 +- tests/conftest.py | 4 +- tests/core/test_configuration.py | 6 +-- tests/core/test_core.py | 3 +- tests/core/test_example_dags_system.py | 5 +- tests/core/test_impersonation_tests.py | 3 +- tests/core/test_logging_config.py | 3 +- tests/core/test_otel_tracer.py | 3 +- tests/core/test_sentry.py | 3 +- tests/core/test_settings.py | 3 +- tests/core/test_sqlalchemy_config.py | 3 +- tests/core/test_stats.py | 3 +- tests/dag_processing/test_job_runner.py | 12 ++--- tests/dag_processing/test_processor.py | 11 ++-- ...st_external_task_sensor_check_existense.py | 1 + tests/dags/test_miscellaneous.py | 3 +- tests/dags/test_sensor.py | 3 +- tests/decorators/test_bash.py | 3 +- tests/decorators/test_mapped.py | 1 + tests/decorators/test_python.py | 4 +- tests/executors/test_executor_loader.py | 3 +- .../cli/commands/test_celery_command.py | 3 +- .../executors/test_celery_executor.py | 3 +- tests/integration/security/test_kerberos.py | 3 +- tests/jobs/test_base_job.py | 4 +- tests/jobs/test_local_task_job.py | 11 ++-- tests/jobs/test_scheduler_job.py | 22 ++++---- tests/jobs/test_triggerer_job.py | 4 +- tests/jobs/test_triggerer_job_logging.py | 3 +- tests/lineage/test_hook.py | 3 +- tests/lineage/test_lineage.py | 3 +- tests/listeners/class_listener.py | 3 +- tests/listeners/test_asset_listener.py | 1 + .../test_dag_import_error_listener.py | 10 ++-- tests/listeners/test_listeners.py | 1 + tests/models/test_backfill.py | 3 +- tests/models/test_base.py | 3 +- 
tests/models/test_baseoperator.py | 4 +- tests/models/test_cleartasks.py | 6 +-- tests/models/test_dag.py | 26 +++++----- tests/models/test_dagbag.py | 8 +-- tests/models/test_dagcode.py | 3 +- tests/models/test_dagrun.py | 10 ++-- tests/models/test_dagwarning.py | 3 +- tests/models/test_mappedoperator.py | 8 +-- tests/models/test_param.py | 3 +- tests/models/test_pool.py | 3 +- tests/models/test_renderedtifields.py | 5 +- tests/models/test_serialized_dag.py | 5 +- tests/models/test_skipmixin.py | 3 +- tests/models/test_taskinstance.py | 12 ++--- tests/models/test_timestamp.py | 3 +- tests/models/test_trigger.py | 3 +- tests/models/test_variable.py | 5 +- tests/models/test_xcom.py | 5 +- tests/models/test_xcom_arg.py | 3 +- tests/operators/test_branch_operator.py | 3 +- tests/operators/test_email.py | 3 +- tests/operators/test_latest_only_operator.py | 5 +- tests/operators/test_python.py | 7 ++- tests/plugins/test_plugin.py | 8 +-- tests/plugins/test_plugins_manager.py | 5 +- tests/secrets/test_cache.py | 3 +- tests/security/test_kerberos.py | 3 +- tests/sensors/test_base.py | 3 +- tests/sensors/test_external_task_sensor.py | 8 +-- tests/sensors/test_python.py | 1 + tests/serialization/test_dag_serialization.py | 37 +++++++------- tests/serialization/test_pydantic_models.py | 4 +- tests/serialization/test_serde.py | 3 +- .../serialization/test_serialized_objects.py | 4 +- .../example_external_task_child_deferrable.py | 2 +- ...example_external_task_parent_deferrable.py | 4 +- tests/system/example_empty.py | 4 +- .../task_runner/test_standard_task_runner.py | 6 +-- .../deps/test_pool_slots_available_dep.py | 3 +- tests/ti_deps/deps/test_prev_dagrun_dep.py | 5 +- .../deps/test_ready_to_reschedule_dep.py | 3 +- tests/utils/log/test_log_reader.py | 5 +- tests/utils/log/test_secrets_masker.py | 3 +- tests/utils/log/test_task_context_logger.py | 5 +- tests/utils/test_dag_cycle.py | 1 + tests/utils/test_db.py | 3 +- tests/utils/test_db_cleanup.py | 3 +- tests/utils/test_db_manager.py | 3 +- tests/utils/test_dot_renderer.py | 5 +- tests/utils/test_email.py | 3 +- tests/utils/test_file.py | 4 +- tests/utils/test_helpers.py | 5 +- tests/utils/test_log_handlers.py | 5 +- tests/utils/test_net.py | 3 +- tests/utils/test_serve_logs.py | 3 +- tests/utils/test_sqlalchemy.py | 3 +- tests/utils/test_state.py | 4 +- tests/utils/test_task_group.py | 6 +-- ...test_task_handler_with_custom_formatter.py | 7 ++- tests/utils/test_types.py | 4 +- tests/www/test_app.py | 5 +- tests/www/test_utils.py | 3 +- tests/www/views/conftest.py | 9 ++-- .../www/views/test_anonymous_as_admin_role.py | 3 +- tests/www/views/test_session.py | 5 +- tests/www/views/test_views.py | 15 +++--- tests/www/views/test_views_acl.py | 10 ++-- tests/www/views/test_views_base.py | 7 ++- .../www/views/test_views_cluster_activity.py | 3 +- tests/www/views/test_views_configuration.py | 5 +- tests/www/views/test_views_connection.py | 3 +- .../www/views/test_views_custom_user_views.py | 4 +- tests/www/views/test_views_dagrun.py | 8 +-- tests/www/views/test_views_dataset.py | 5 +- tests/www/views/test_views_decorators.py | 7 ++- tests/www/views/test_views_extra_links.py | 15 +++--- tests/www/views/test_views_grid.py | 7 ++- tests/www/views/test_views_home.py | 8 +-- tests/www/views/test_views_log.py | 11 ++-- tests/www/views/test_views_mount.py | 3 +- tests/www/views/test_views_paused.py | 3 +- tests/www/views/test_views_pool.py | 3 +- tests/www/views/test_views_rate_limit.py | 7 ++- tests/www/views/test_views_rendered.py | 7 ++- 
tests/www/views/test_views_robots.py | 2 +- tests/www/views/test_views_task_norun.py | 2 +- tests/www/views/test_views_tasks.py | 10 ++-- tests/www/views/test_views_trigger_dag.py | 4 +- tests/www/views/test_views_variable.py | 4 +- .../tests_common => tests_common}/__init__.py | 0 .../_internals/__init__.py | 0 .../_internals/capture_warnings.py | 4 +- .../_internals/forbidden_warnings.py | 2 +- .../pyproject.toml | 2 +- .../pytest_plugin.py | 51 ++++++++++--------- .../test_utils/README.md | 0 .../test_utils/__init__.py | 2 +- .../test_utils/api_connexion_utils.py | 12 ++--- .../test_utils/asserts.py | 9 +++- .../test_utils/azure_system_helpers.py | 16 +++--- .../test_utils/compat.py | 2 +- .../test_utils/config.py | 0 .../test_utils/db.py | 8 ++- .../test_utils/decorators.py | 0 .../test_utils/fake_datetime.py | 4 +- .../test_utils/gcp_system_helpers.py | 17 +++---- .../test_utils/get_all_tests.py | 8 ++- .../test_utils/hdfs_utils.py | 2 + .../test_utils/logging_command_executor.py | 6 ++- .../test_utils/mapping.py | 0 .../test_utils/mock_executor.py | 6 +-- .../test_utils/mock_operators.py | 51 ++++++++++--------- .../test_utils/mock_plugins.py | 0 .../test_utils/mock_security_manager.py | 4 +- .../test_utils/operators/__init__.py | 0 .../operators/postgres_local_executor.cfg | 0 .../test_utils/perf/__init__.py | 0 .../test_utils/perf/perf_kit/__init__.py | 5 +- .../test_utils/perf/perf_kit/memory.py | 4 +- .../test_utils/perf/perf_kit/python.py | 15 +++--- .../perf/perf_kit/repeat_and_time.py | 12 ++--- .../test_utils/perf/perf_kit/sqlalchemy.py | 15 +++--- .../test_utils/permissions.py | 2 + .../test_utils/providers.py | 4 +- .../remote_user_api_auth_backend.py | 2 +- .../test_utils/reset_warning_registry.py | 4 +- .../test_utils/salesforce_system_helpers.py | 0 .../test_utils/sftp_system_helpers.py | 2 +- .../test_utils/system_tests.py | 0 .../test_utils/system_tests_class.py | 13 ++--- .../test_utils/terraform.py | 4 +- .../test_utils/timetables.py | 2 + .../test_utils/watcher.py | 8 ++- .../test_utils/www.py | 0 768 files changed, 1560 insertions(+), 1752 deletions(-) rename {dev/tests_common => tests_common}/__init__.py (100%) rename {dev/tests_common => tests_common}/_internals/__init__.py (100%) rename {dev/tests_common => tests_common}/_internals/capture_warnings.py (99%) rename {dev/tests_common => tests_common}/_internals/forbidden_warnings.py (99%) rename {dev/tests_common => tests_common}/pyproject.toml (98%) rename {dev/tests_common => tests_common}/pytest_plugin.py (97%) rename {dev/tests_common => tests_common}/test_utils/README.md (100%) rename {dev/tests_common => tests_common}/test_utils/__init__.py (94%) rename {dev/tests_common => tests_common}/test_utils/api_connexion_utils.py (91%) rename {dev/tests_common => tests_common}/test_utils/asserts.py (95%) rename {dev/tests_common => tests_common}/test_utils/azure_system_helpers.py (93%) rename {dev/tests_common => tests_common}/test_utils/compat.py (99%) rename {dev/tests_common => tests_common}/test_utils/config.py (100%) rename {dev/tests_common => tests_common}/test_utils/db.py (97%) rename {dev/tests_common => tests_common}/test_utils/decorators.py (100%) rename {dev/tests_common => tests_common}/test_utils/fake_datetime.py (92%) rename {dev/tests_common => tests_common}/test_utils/gcp_system_helpers.py (95%) rename {dev/tests_common => tests_common}/test_utils/get_all_tests.py (92%) rename {dev/tests_common => tests_common}/test_utils/hdfs_utils.py (95%) rename {dev/tests_common => 
tests_common}/test_utils/logging_command_executor.py (97%) rename {dev/tests_common => tests_common}/test_utils/mapping.py (100%) rename {dev/tests_common => tests_common}/test_utils/mock_executor.py (97%) rename {dev/tests_common => tests_common}/test_utils/mock_operators.py (84%) rename {dev/tests_common => tests_common}/test_utils/mock_plugins.py (100%) rename {dev/tests_common => tests_common}/test_utils/mock_security_manager.py (89%) rename {dev/tests_common => tests_common}/test_utils/operators/__init__.py (100%) rename {dev/tests_common => tests_common}/test_utils/operators/postgres_local_executor.cfg (100%) rename {dev/tests_common => tests_common}/test_utils/perf/__init__.py (100%) rename {dev/tests_common => tests_common}/test_utils/perf/perf_kit/__init__.py (99%) rename {dev/tests_common => tests_common}/test_utils/perf/perf_kit/memory.py (94%) rename {dev/tests_common => tests_common}/test_utils/perf/perf_kit/python.py (90%) rename {dev/tests_common => tests_common}/test_utils/perf/perf_kit/repeat_and_time.py (92%) rename {dev/tests_common => tests_common}/test_utils/perf/perf_kit/sqlalchemy.py (97%) rename {dev/tests_common => tests_common}/test_utils/permissions.py (93%) rename {dev/tests_common => tests_common}/test_utils/providers.py (94%) rename {dev/tests_common => tests_common}/test_utils/remote_user_api_auth_backend.py (97%) rename {dev/tests_common => tests_common}/test_utils/reset_warning_registry.py (96%) rename {dev/tests_common => tests_common}/test_utils/salesforce_system_helpers.py (100%) rename {dev/tests_common => tests_common}/test_utils/sftp_system_helpers.py (99%) rename {dev/tests_common => tests_common}/test_utils/system_tests.py (100%) rename {dev/tests_common => tests_common}/test_utils/system_tests_class.py (95%) rename {dev/tests_common => tests_common}/test_utils/terraform.py (93%) rename {dev/tests_common => tests_common}/test_utils/timetables.py (97%) rename {dev/tests_common => tests_common}/test_utils/watcher.py (86%) rename {dev/tests_common => tests_common}/test_utils/www.py (100%) diff --git a/.dockerignore b/.dockerignore index 2d70030fd886b..e913ed4f43c89 100644 --- a/.dockerignore +++ b/.dockerignore @@ -42,6 +42,7 @@ # Add tests and kubernetes_tests to context. 
!tests +!tests_common !kubernetes_tests !helm_tests !docker_tests diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a2f42e65192bf..fe4315fafc212 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -364,7 +364,7 @@ repos: args: [] require_serial: true additional_dependencies: ["ruff==0.5.5"] - exclude: ^.*/.*_vendor/|^tests/dags/test_imports.py|^airflow/contrib/ + exclude: ^.*/.*_vendor/|^tests/dags/test_imports.py$ - id: replace-bad-characters name: Replace bad characters entry: ./scripts/ci/pre_commit/replace_bad_characters.py @@ -736,7 +736,7 @@ repos: name: Verify usage of Airflow deprecation classes in core entry: category=DeprecationWarning|category=PendingDeprecationWarning files: \.py$ - exclude: ^airflow/configuration\.py$|^providers/src/airflow/providers/|^scripts/in_container/verify_providers\.py$|^(providers/)?tests/.*$|^dev/tests_common/ + exclude: ^airflow/configuration\.py$|^providers/src/airflow/providers/|^scripts/in_container/verify_providers\.py$|^(providers/)?tests/.*$|^tests_common/ pass_filenames: true - id: check-provide-create-sessions-imports language: pygrep @@ -1184,7 +1184,9 @@ repos: ^(providers/)?tests/ | ^dev/.*\.py$ | ^scripts/.*\.py$ | - ^\w+_tests/ | + ^docker_tests/.*$ | + ^helm_tests/.*$ | + ^tests_common/.*$ | ^docs/.*\.py$ | ^hatch_build.py$ - id: check-provider-docs-valid diff --git a/Dockerfile.ci b/Dockerfile.ci index df1cdf8c27385..0cdf9899799f3 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -1369,6 +1369,7 @@ COPY pyproject.toml ${AIRFLOW_SOURCES}/pyproject.toml COPY providers/pyproject.toml ${AIRFLOW_SOURCES}/providers/pyproject.toml COPY task_sdk/pyproject.toml ${AIRFLOW_SOURCES}/task_sdk/pyproject.toml COPY airflow/__init__.py ${AIRFLOW_SOURCES}/airflow/ +COPY tests_common/ ${AIRFLOW_SOURCES}/tests_common/ COPY generated/* ${AIRFLOW_SOURCES}/generated/ COPY constraints/* ${AIRFLOW_SOURCES}/constraints/ COPY LICENSE ${AIRFLOW_SOURCES}/LICENSE diff --git a/contributing-docs/testing/system_tests.rst b/contributing-docs/testing/system_tests.rst index cc64d25e90cbc..fe010f76bd5d5 100644 --- a/contributing-docs/testing/system_tests.rst +++ b/contributing-docs/testing/system_tests.rst @@ -35,7 +35,7 @@ Airflow system tests are pretty special because they serve three purposes: Old System Tests ---------------- -The system tests derive from the ``dev.tests_common.test_utils.system_test_class.SystemTests`` class. +The system tests derive from the ``tests_common.test_utils.system_test_class.SystemTests`` class. Old versions of System tests should also be marked with ``@pytest.marker.system(SYSTEM)`` where ``system`` designates the system to be tested (for example, ``google.cloud``). These tests are skipped by default. diff --git a/contributing-docs/testing/unit_tests.rst b/contributing-docs/testing/unit_tests.rst index 468f10b68d4c5..dc4e27f10d9fe 100644 --- a/contributing-docs/testing/unit_tests.rst +++ b/contributing-docs/testing/unit_tests.rst @@ -1171,7 +1171,7 @@ are not part of the public API. 
We deal with it in one of the following ways: 1) If the whole provider is supposed to only work for later airflow version, we remove the whole provider by excluding it from compatibility test configuration (see below) -2) Some compatibility shims are defined in ``dev.tests_common.test_utils/compat.py`` - and they can be used to make the +2) Some compatibility shims are defined in ``tests_common.test_utils/compat.py`` - and they can be used to make the tests compatible - for example importing ``ParseImportError`` after the exception has been renamed from ``ImportError`` and it would fail in Airflow 2.9, but we have a fallback import in ``compat.py`` that falls back to old import automatically, so all tests testing / expecting ``ParseImportError`` should import @@ -1184,7 +1184,7 @@ are not part of the public API. We deal with it in one of the following ways: .. code-block:: python - from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS + from tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS @pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="The tests should be skipped for Airflow < 2.8") @@ -1197,7 +1197,7 @@ are not part of the public API. We deal with it in one of the following ways: .. code-block:: python - from dev.tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES + from tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES @pytest.mark.skipif( diff --git a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py index 164a1583e1fea..2352bcda533a2 100644 --- a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py @@ -97,6 +97,7 @@ ("pyproject.toml", "/opt/airflow/pyproject.toml"), ("scripts", "/opt/airflow/scripts"), ("scripts/docker/entrypoint_ci.sh", "/entrypoint"), + ("tests_common", "/opt/airflow/tests_common"), ("tests", "/opt/airflow/tests"), ("helm_tests", "/opt/airflow/helm_tests"), ("kubernetes_tests", "/opt/airflow/kubernetes_tests"), diff --git a/dev/breeze/src/airflow_breeze/utils/selective_checks.py b/dev/breeze/src/airflow_breeze/utils/selective_checks.py index cf4e124ff94e3..74a856f3693c6 100644 --- a/dev/breeze/src/airflow_breeze/utils/selective_checks.py +++ b/dev/breeze/src/airflow_breeze/utils/selective_checks.py @@ -245,7 +245,7 @@ def __hash__(self): ], FileGroupForCi.TESTS_UTILS_FILES: [ r"^tests/utils/", - r"^dev/tests_common/.*\.py$", + r"^tests_common/.*\.py$", ], FileGroupForCi.TASK_SDK_FILES: [ r"^task_sdk/src/airflow/sdk/.*\.py$", diff --git a/dev/breeze/tests/test_selective_checks.py b/dev/breeze/tests/test_selective_checks.py index 619dc8eec2f99..ec0769c0a87fe 100644 --- a/dev/breeze/tests/test_selective_checks.py +++ b/dev/breeze/tests/test_selective_checks.py @@ -778,7 +778,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ), ( pytest.param( - ("dev/tests_common/__init__.py",), + ("tests_common/__init__.py",), { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, "all-python-versions": "['3.9']", @@ -799,7 +799,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "needs-mypy": "true", "mypy-folders": "['airflow', 'providers', 'docs', 'dev']", }, - id="All tests should be run when dev/tests_common/ change", + id="All tests should be run when tests_common/ change", ) ), ], diff --git a/dev/perf/scheduler_dag_execution_timing.py b/dev/perf/scheduler_dag_execution_timing.py index fc5c21bda7aa7..1956c8023b5f0 100755 
--- a/dev/perf/scheduler_dag_execution_timing.py +++ b/dev/perf/scheduler_dag_execution_timing.py @@ -107,7 +107,7 @@ def get_executor_under_test(dotted_path): from airflow.executors.executor_loader import ExecutorLoader if dotted_path == "MockExecutor": - from dev.tests_common.test_utils.mock_executor import MockExecutor as executor + from tests_common.test_utils.mock_executor import MockExecutor as executor else: executor = ExecutorLoader.load_executor(dotted_path) diff --git a/helm_tests/airflow_aux/test_pod_template_file.py b/helm_tests/airflow_aux/test_pod_template_file.py index 8c6c8c21f9805..5a507b7ee6aae 100644 --- a/helm_tests/airflow_aux/test_pod_template_file.py +++ b/helm_tests/airflow_aux/test_pod_template_file.py @@ -24,6 +24,7 @@ import pytest from helm_tests.airflow_aux.test_container_lifecycle import CONTAINER_LIFECYCLE_PARAMETERS + from tests.charts.helm_template_generator import render_chart diff --git a/providers/tests/alibaba/cloud/log/test_oss_task_handler.py b/providers/tests/alibaba/cloud/log/test_oss_task_handler.py index 18abe57aa09b7..ff65dd89ce4ef 100644 --- a/providers/tests/alibaba/cloud/log/test_oss_task_handler.py +++ b/providers/tests/alibaba/cloud/log/test_oss_task_handler.py @@ -22,14 +22,13 @@ from unittest.mock import PropertyMock import pytest +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_dags, clear_db_runs from airflow.providers.alibaba.cloud.log.oss_task_handler import OSSTaskHandler from airflow.utils.state import TaskInstanceState from airflow.utils.timezone import datetime -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs - pytestmark = pytest.mark.db_test OSS_TASK_HANDLER_STRING = "airflow.providers.alibaba.cloud.log.oss_task_handler.{}" diff --git a/providers/tests/amazon/aws/auth_manager/avp/test_facade.py b/providers/tests/amazon/aws/auth_manager/avp/test_facade.py index 3d2a0195039df..af1149aec45fd 100644 --- a/providers/tests/amazon/aws/auth_manager/avp/test_facade.py +++ b/providers/tests/amazon/aws/auth_manager/avp/test_facade.py @@ -21,6 +21,7 @@ from unittest.mock import Mock import pytest +from tests_common.test_utils.config import conf_vars from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.auth_manager.avp.entities import AvpEntities, get_action_id, get_entity_type @@ -28,8 +29,6 @@ from airflow.providers.amazon.aws.auth_manager.user import AwsAuthManagerUser from airflow.utils.helpers import prune_dict -from dev.tests_common.test_utils.config import conf_vars - if TYPE_CHECKING: from airflow.auth.managers.base_auth_manager import ResourceMethod diff --git a/providers/tests/amazon/aws/auth_manager/cli/test_avp_commands.py b/providers/tests/amazon/aws/auth_manager/cli/test_avp_commands.py index 6122079fee4cb..c523beec564c7 100644 --- a/providers/tests/amazon/aws/auth_manager/cli/test_avp_commands.py +++ b/providers/tests/amazon/aws/auth_manager/cli/test_avp_commands.py @@ -20,13 +20,12 @@ from unittest.mock import ANY, Mock, patch import pytest +from tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS +from tests_common.test_utils.config import conf_vars from airflow.cli import cli_parser from airflow.providers.amazon.aws.auth_manager.cli.avp_commands import init_avp, update_schema -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS -from dev.tests_common.test_utils.config import conf_vars - mock_boto3 = Mock() pytestmark = [ diff --git 
a/providers/tests/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py b/providers/tests/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py index b6071aac955c6..1c57bc14d83ed 100644 --- a/providers/tests/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py +++ b/providers/tests/amazon/aws/auth_manager/security_manager/test_aws_security_manager_override.py @@ -20,8 +20,7 @@ import pytest from flask import Flask - -from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error +from tests_common.test_utils.compat import ignore_provider_compatibility_error python3_saml = pytest.importorskip("python3-saml") diff --git a/providers/tests/amazon/aws/auth_manager/test_aws_auth_manager.py b/providers/tests/amazon/aws/auth_manager/test_aws_auth_manager.py index 47e8a4cbcb26d..ba93b2105a81e 100644 --- a/providers/tests/amazon/aws/auth_manager/test_aws_auth_manager.py +++ b/providers/tests/amazon/aws/auth_manager/test_aws_auth_manager.py @@ -22,6 +22,9 @@ import pytest from flask import Flask, session from flask_appbuilder.menu import MenuItem +from tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS, AIRFLOW_V_2_9_PLUS +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.www import check_content_in_response from airflow.providers.amazon.aws.auth_manager.avp.entities import AvpEntities from airflow.providers.amazon.aws.auth_manager.avp.facade import AwsAuthManagerAmazonVerifiedPermissionsFacade @@ -39,10 +42,6 @@ from airflow.www import app as application from airflow.www.extensions.init_appbuilder import init_appbuilder -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS, AIRFLOW_V_2_9_PLUS -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.www import check_content_in_response - try: from airflow.auth.managers.models.resource_details import ( AccessView, diff --git a/providers/tests/amazon/aws/auth_manager/views/test_auth.py b/providers/tests/amazon/aws/auth_manager/views/test_auth.py index 9b2eec69188be..919eb59ead059 100644 --- a/providers/tests/amazon/aws/auth_manager/views/test_auth.py +++ b/providers/tests/amazon/aws/auth_manager/views/test_auth.py @@ -20,13 +20,12 @@ import pytest from flask import session, url_for +from tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS +from tests_common.test_utils.config import conf_vars from airflow.exceptions import AirflowException from airflow.www import app as application -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS -from dev.tests_common.test_utils.config import conf_vars - pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Test requires Airflow 2.9+"), pytest.mark.skip_if_database_isolation_mode, diff --git a/providers/tests/amazon/aws/executors/batch/test_batch_executor.py b/providers/tests/amazon/aws/executors/batch/test_batch_executor.py index e7037bd16c85e..85272186b3324 100644 --- a/providers/tests/amazon/aws/executors/batch/test_batch_executor.py +++ b/providers/tests/amazon/aws/executors/batch/test_batch_executor.py @@ -26,6 +26,8 @@ import yaml from botocore.exceptions import ClientError, NoCredentialsError from semver import VersionInfo +from tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES +from tests_common.test_utils.config import conf_vars from airflow.exceptions import AirflowException from airflow.executors.base_executor import BaseExecutor @@ -46,9 +48,6 @@ from airflow.utils.state import State 
from airflow.version import version as airflow_version_str -from dev.tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES -from dev.tests_common.test_utils.config import conf_vars - airflow_version = VersionInfo(*map(int, airflow_version_str.split(".")[:3])) ARN1 = "arn1" diff --git a/providers/tests/amazon/aws/executors/ecs/test_ecs_executor.py b/providers/tests/amazon/aws/executors/ecs/test_ecs_executor.py index 50cdb580382f3..80b7d85a9eded 100644 --- a/providers/tests/amazon/aws/executors/ecs/test_ecs_executor.py +++ b/providers/tests/amazon/aws/executors/ecs/test_ecs_executor.py @@ -32,6 +32,9 @@ from botocore.exceptions import ClientError from inflection import camelize from semver import VersionInfo +from tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES +from tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS +from tests_common.test_utils.config import conf_vars from airflow.exceptions import AirflowException from airflow.executors.base_executor import BaseExecutor @@ -57,10 +60,6 @@ from airflow.utils.timezone import utcnow from airflow.version import version as airflow_version_str -from dev.tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS -from dev.tests_common.test_utils.config import conf_vars - pytestmark = pytest.mark.db_test airflow_version = VersionInfo(*map(int, airflow_version_str.split(".")[:3])) diff --git a/providers/tests/amazon/aws/hooks/test_base_aws.py b/providers/tests/amazon/aws/hooks/test_base_aws.py index c58993d748f2d..8919203da0f01 100644 --- a/providers/tests/amazon/aws/hooks/test_base_aws.py +++ b/providers/tests/amazon/aws/hooks/test_base_aws.py @@ -38,6 +38,7 @@ from botocore.utils import FileWebIdentityTokenLoader from moto import mock_aws from moto.core import DEFAULT_ACCOUNT_ID +from tests_common.test_utils.config import conf_vars from airflow.exceptions import AirflowException from airflow.models.connection import Connection @@ -50,8 +51,6 @@ ) from airflow.providers.amazon.aws.utils.connection_wrapper import AwsConnectionWrapper -from dev.tests_common.test_utils.config import conf_vars - pytest.importorskip("aiobotocore") MOCK_AWS_CONN_ID = "mock-conn-id" diff --git a/providers/tests/amazon/aws/hooks/test_s3.py b/providers/tests/amazon/aws/hooks/test_s3.py index 2e9a010006bab..89d29dae02dde 100644 --- a/providers/tests/amazon/aws/hooks/test_s3.py +++ b/providers/tests/amazon/aws/hooks/test_s3.py @@ -30,6 +30,7 @@ import pytest from botocore.exceptions import ClientError from moto import mock_aws +from tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS from airflow.exceptions import AirflowException from airflow.models import Connection @@ -43,8 +44,6 @@ ) from airflow.utils.timezone import datetime -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS - @pytest.fixture def mocked_s3_res(): diff --git a/providers/tests/amazon/aws/links/test_base_aws.py b/providers/tests/amazon/aws/links/test_base_aws.py index 78622870806fb..738fb6acc482c 100644 --- a/providers/tests/amazon/aws/links/test_base_aws.py +++ b/providers/tests/amazon/aws/links/test_base_aws.py @@ -21,14 +21,13 @@ from unittest import mock import pytest +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.mock_operators import MockOperator from airflow.models.xcom import XCom from airflow.providers.amazon.aws.links.base_aws import BaseAwsLink from airflow.serialization.serialized_objects import SerializedDAG -from 
dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.mock_operators import MockOperator - if TYPE_CHECKING: from airflow.models.taskinstance import TaskInstance diff --git a/providers/tests/amazon/aws/log/test_cloudwatch_task_handler.py b/providers/tests/amazon/aws/log/test_cloudwatch_task_handler.py index c78fab89e197a..d07699d9a7049 100644 --- a/providers/tests/amazon/aws/log/test_cloudwatch_task_handler.py +++ b/providers/tests/amazon/aws/log/test_cloudwatch_task_handler.py @@ -26,6 +26,7 @@ import boto3 import pytest from moto import mock_aws +from tests_common.test_utils.config import conf_vars from watchtower import CloudWatchLogHandler from airflow.models import DAG, DagRun, TaskInstance @@ -36,8 +37,6 @@ from airflow.utils.state import State from airflow.utils.timezone import datetime -from dev.tests_common.test_utils.config import conf_vars - def get_time_str(time_in_milliseconds): dt_time = dt.fromtimestamp(time_in_milliseconds / 1000.0, tz=timezone.utc) diff --git a/providers/tests/amazon/aws/log/test_s3_task_handler.py b/providers/tests/amazon/aws/log/test_s3_task_handler.py index 9819cf95e9cc1..a471bf0075479 100644 --- a/providers/tests/amazon/aws/log/test_s3_task_handler.py +++ b/providers/tests/amazon/aws/log/test_s3_task_handler.py @@ -26,6 +26,7 @@ import pytest from botocore.exceptions import ClientError from moto import mock_aws +from tests_common.test_utils.config import conf_vars from airflow.models import DAG, DagRun, TaskInstance from airflow.operators.empty import EmptyOperator @@ -34,8 +35,6 @@ from airflow.utils.state import State, TaskInstanceState from airflow.utils.timezone import datetime -from dev.tests_common.test_utils.config import conf_vars - @pytest.fixture(autouse=True) def s3mock(): diff --git a/providers/tests/amazon/aws/secrets/test_systems_manager.py b/providers/tests/amazon/aws/secrets/test_systems_manager.py index c1b35a799d3c9..e1c035ab42047 100644 --- a/providers/tests/amazon/aws/secrets/test_systems_manager.py +++ b/providers/tests/amazon/aws/secrets/test_systems_manager.py @@ -21,12 +21,11 @@ import pytest from moto import mock_aws +from tests_common.test_utils.config import conf_vars from airflow.configuration import initialize_secrets_backends from airflow.providers.amazon.aws.secrets.systems_manager import SystemsManagerParameterStoreBackend -from dev.tests_common.test_utils.config import conf_vars - URI_CONNECTION = pytest.param( "postgres://my-login:my-pass@my-host:5432/my-schema?param1=val1¶m2=val2", id="uri-connection" ) diff --git a/providers/tests/amazon/aws/transfers/test_redshift_to_s3.py b/providers/tests/amazon/aws/transfers/test_redshift_to_s3.py index e27ff2d0aa56d..605705a2b663f 100644 --- a/providers/tests/amazon/aws/transfers/test_redshift_to_s3.py +++ b/providers/tests/amazon/aws/transfers/test_redshift_to_s3.py @@ -22,14 +22,13 @@ import pytest from boto3.session import Session +from tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces from airflow.exceptions import AirflowException from airflow.models.connection import Connection from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator from airflow.providers.amazon.aws.utils.redshift import build_credentials_block -from dev.tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces - class TestRedshiftToS3Transfer: @pytest.mark.parametrize("table_as_file_name, expected_s3_key", [[True, "key/table_"], [False, "key"]]) diff --git 
a/providers/tests/amazon/aws/transfers/test_s3_to_redshift.py b/providers/tests/amazon/aws/transfers/test_s3_to_redshift.py index 6e300791e0935..c9d9ca337d111 100644 --- a/providers/tests/amazon/aws/transfers/test_s3_to_redshift.py +++ b/providers/tests/amazon/aws/transfers/test_s3_to_redshift.py @@ -22,6 +22,7 @@ import pytest from boto3.session import Session +from tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces from airflow.exceptions import AirflowException from airflow.models.connection import Connection @@ -34,8 +35,6 @@ SchemaDatasetFacetFields, ) -from dev.tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces - class TestS3ToRedshiftTransfer: @mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.get_connection") diff --git a/providers/tests/amazon/aws/transfers/test_s3_to_sftp.py b/providers/tests/amazon/aws/transfers/test_s3_to_sftp.py index 545398d9666f8..bcab8b2909af3 100644 --- a/providers/tests/amazon/aws/transfers/test_s3_to_sftp.py +++ b/providers/tests/amazon/aws/transfers/test_s3_to_sftp.py @@ -20,6 +20,7 @@ import boto3 import pytest from moto import mock_aws +from tests_common.test_utils.config import conf_vars from airflow.models import DAG from airflow.providers.amazon.aws.hooks.s3 import S3Hook @@ -28,8 +29,6 @@ from airflow.providers.ssh.operators.ssh import SSHOperator from airflow.utils.timezone import datetime -from dev.tests_common.test_utils.config import conf_vars - pytestmark = pytest.mark.db_test diff --git a/providers/tests/amazon/aws/transfers/test_sftp_to_s3.py b/providers/tests/amazon/aws/transfers/test_sftp_to_s3.py index e4afe5c8c1efb..980c8a19daa55 100644 --- a/providers/tests/amazon/aws/transfers/test_sftp_to_s3.py +++ b/providers/tests/amazon/aws/transfers/test_sftp_to_s3.py @@ -20,6 +20,7 @@ import boto3 import pytest from moto import mock_aws +from tests_common.test_utils.config import conf_vars from airflow.models import DAG from airflow.providers.amazon.aws.hooks.s3 import S3Hook @@ -28,8 +29,6 @@ from airflow.providers.ssh.operators.ssh import SSHOperator from airflow.utils.timezone import datetime -from dev.tests_common.test_utils.config import conf_vars - pytestmark = pytest.mark.db_test BUCKET = "test-bucket" diff --git a/providers/tests/apache/hive/hooks/test_hive.py b/providers/tests/apache/hive/hooks/test_hive.py index aee09db28088e..d975ce50fb692 100644 --- a/providers/tests/apache/hive/hooks/test_hive.py +++ b/providers/tests/apache/hive/hooks/test_hive.py @@ -25,6 +25,7 @@ import pandas as pd import pytest from hmsclient import HMSClient +from tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces from airflow.exceptions import AirflowException from airflow.models.connection import Connection @@ -34,7 +35,6 @@ from airflow.utils import timezone from airflow.utils.operator_helpers import AIRFLOW_VAR_NAME_FORMAT_MAPPING -from dev.tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces from providers.tests.apache.hive import ( BaseMockConnectionCursor, InvalidHiveCliHook, diff --git a/providers/tests/apache/livy/hooks/test_livy.py b/providers/tests/apache/livy/hooks/test_livy.py index 0fca347b06a99..d2968591f90f4 100644 --- a/providers/tests/apache/livy/hooks/test_livy.py +++ b/providers/tests/apache/livy/hooks/test_livy.py @@ -25,14 +25,13 @@ import requests from aiohttp import ClientResponseError, RequestInfo from requests.exceptions import RequestException +from tests_common.test_utils.db import clear_db_connections from airflow.exceptions import 
AirflowException from airflow.models import Connection from airflow.providers.apache.livy.hooks.livy import BatchState, LivyAsyncHook, LivyHook from airflow.utils import db -from dev.tests_common.test_utils.db import clear_db_connections - pytestmark = pytest.mark.skip_if_database_isolation_mode LIVY_CONN_ID = LivyHook.default_conn_name diff --git a/providers/tests/apache/spark/hooks/test_spark_sql.py b/providers/tests/apache/spark/hooks/test_spark_sql.py index 10f63cce99326..179e75ffc7470 100644 --- a/providers/tests/apache/spark/hooks/test_spark_sql.py +++ b/providers/tests/apache/spark/hooks/test_spark_sql.py @@ -22,14 +22,13 @@ from unittest.mock import call, patch import pytest +from tests_common.test_utils.db import clear_db_connections from airflow.exceptions import AirflowException from airflow.models import Connection from airflow.providers.apache.spark.hooks.spark_sql import SparkSqlHook from airflow.utils import db -from dev.tests_common.test_utils.db import clear_db_connections - pytestmark = pytest.mark.db_test diff --git a/providers/tests/atlassian/jira/hooks/test_jira.py b/providers/tests/atlassian/jira/hooks/test_jira.py index e2cf9389471e7..fbb7913d128f0 100644 --- a/providers/tests/atlassian/jira/hooks/test_jira.py +++ b/providers/tests/atlassian/jira/hooks/test_jira.py @@ -20,13 +20,12 @@ from unittest import mock import pytest +from tests_common.test_utils.compat import connection_as_json from airflow.exceptions import AirflowProviderDeprecationWarning from airflow.models import Connection from airflow.providers.atlassian.jira.hooks.jira import JiraHook -from dev.tests_common.test_utils.compat import connection_as_json - @pytest.fixture def mocked_jira_client(): diff --git a/providers/tests/atlassian/jira/operators/test_jira.py b/providers/tests/atlassian/jira/operators/test_jira.py index 614c1a4939a90..67c63a60d61d4 100644 --- a/providers/tests/atlassian/jira/operators/test_jira.py +++ b/providers/tests/atlassian/jira/operators/test_jira.py @@ -20,13 +20,12 @@ from unittest import mock import pytest +from tests_common.test_utils.compat import connection_as_json from airflow.models import Connection from airflow.providers.atlassian.jira.operators.jira import JiraOperator from airflow.utils import timezone -from dev.tests_common.test_utils.compat import connection_as_json - DEFAULT_DATE = timezone.datetime(2017, 1, 1) MINIMAL_TEST_TICKET = { "id": "911539", diff --git a/providers/tests/atlassian/jira/sensors/test_jira.py b/providers/tests/atlassian/jira/sensors/test_jira.py index 9cb2572969ad4..ca027b7b5989d 100644 --- a/providers/tests/atlassian/jira/sensors/test_jira.py +++ b/providers/tests/atlassian/jira/sensors/test_jira.py @@ -20,13 +20,12 @@ from unittest import mock import pytest +from tests_common.test_utils.compat import connection_as_json from airflow.models import Connection from airflow.providers.atlassian.jira.sensors.jira import JiraTicketSensor from airflow.utils import timezone -from dev.tests_common.test_utils.compat import connection_as_json - DEFAULT_DATE = timezone.datetime(2017, 1, 1) MINIMAL_TEST_TICKET = { "id": "911539", diff --git a/providers/tests/celery/cli/test_celery_command.py b/providers/tests/celery/cli/test_celery_command.py index 03f5d60dabe40..0d0310f7452ea 100644 --- a/providers/tests/celery/cli/test_celery_command.py +++ b/providers/tests/celery/cli/test_celery_command.py @@ -24,6 +24,8 @@ import pytest import sqlalchemy +from tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS +from tests_common.test_utils.config import 
conf_vars import airflow from airflow.cli import cli_parser @@ -31,9 +33,6 @@ from airflow.executors import executor_loader from airflow.providers.celery.cli import celery_command -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS -from dev.tests_common.test_utils.config import conf_vars - pytestmark = pytest.mark.db_test diff --git a/providers/tests/celery/executors/test_celery_executor.py b/providers/tests/celery/executors/test_celery_executor.py index 261a11f837e42..f72ee35b0f50f 100644 --- a/providers/tests/celery/executors/test_celery_executor.py +++ b/providers/tests/celery/executors/test_celery_executor.py @@ -32,6 +32,9 @@ from celery import Celery from celery.result import AsyncResult from kombu.asynchronous import set_event_loop +from tests_common.test_utils import db +from tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS +from tests_common.test_utils.config import conf_vars from airflow.configuration import conf from airflow.models.baseoperator import BaseOperator @@ -42,10 +45,6 @@ from airflow.utils import timezone from airflow.utils.state import State -from dev.tests_common.test_utils import db -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS -from dev.tests_common.test_utils.config import conf_vars - pytestmark = pytest.mark.db_test diff --git a/providers/tests/celery/log_handlers/test_log_handlers.py b/providers/tests/celery/log_handlers/test_log_handlers.py index 9eb9e33e2ae0d..5d7d4d473d2fc 100644 --- a/providers/tests/celery/log_handlers/test_log_handlers.py +++ b/providers/tests/celery/log_handlers/test_log_handlers.py @@ -23,6 +23,8 @@ from unittest import mock import pytest +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.config import conf_vars from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG from airflow.executors import executor_loader @@ -36,9 +38,6 @@ from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.config import conf_vars - if AIRFLOW_V_3_0_PLUS: pass diff --git a/providers/tests/cncf/kubernetes/cli/test_kubernetes_command.py b/providers/tests/cncf/kubernetes/cli/test_kubernetes_command.py index 62019a8d2fe90..4d277c3b51e2d 100644 --- a/providers/tests/cncf/kubernetes/cli/test_kubernetes_command.py +++ b/providers/tests/cncf/kubernetes/cli/test_kubernetes_command.py @@ -24,13 +24,12 @@ import kubernetes import pytest from dateutil.parser import parse +from tests_common.test_utils.config import conf_vars from airflow.cli import cli_parser from airflow.executors import executor_loader from airflow.providers.cncf.kubernetes.cli import kubernetes_command -from dev.tests_common.test_utils.config import conf_vars - pytestmark = pytest.mark.db_test diff --git a/providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py b/providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py index 07b42ee3fc22a..85077ba1776bd 100644 --- a/providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py +++ b/providers/tests/cncf/kubernetes/executors/test_kubernetes_executor.py @@ -26,6 +26,8 @@ import yaml from kubernetes.client import models as k8s from kubernetes.client.rest import ApiException +from tests_common.test_utils.compat import BashOperator +from tests_common.test_utils.config import conf_vars from urllib3 import HTTPResponse from airflow.exceptions import AirflowException, 
AirflowProviderDeprecationWarning @@ -55,9 +57,6 @@ from airflow.utils import timezone from airflow.utils.state import State, TaskInstanceState -from dev.tests_common.test_utils.compat import BashOperator -from dev.tests_common.test_utils.config import conf_vars - pytestmark = pytest.mark.skip_if_database_isolation_mode diff --git a/providers/tests/cncf/kubernetes/hooks/test_kubernetes.py b/providers/tests/cncf/kubernetes/hooks/test_kubernetes.py index a6847e9c2fe56..d90ed0504401a 100644 --- a/providers/tests/cncf/kubernetes/hooks/test_kubernetes.py +++ b/providers/tests/cncf/kubernetes/hooks/test_kubernetes.py @@ -29,6 +29,8 @@ from kubernetes.client.rest import ApiException from kubernetes.config import ConfigException from sqlalchemy.orm import make_transient +from tests_common.test_utils.db import clear_db_connections +from tests_common.test_utils.providers import get_provider_min_airflow_version from airflow.exceptions import AirflowException, AirflowNotFoundException from airflow.hooks.base import BaseHook @@ -37,9 +39,6 @@ from airflow.utils import db from airflow.utils.db import merge_conn -from dev.tests_common.test_utils.db import clear_db_connections -from dev.tests_common.test_utils.providers import get_provider_min_airflow_version - pytestmark = pytest.mark.db_test KUBE_CONFIG_PATH = os.getenv("KUBECONFIG", "~/.kube/config") diff --git a/providers/tests/cncf/kubernetes/log_handlers/test_log_handlers.py b/providers/tests/cncf/kubernetes/log_handlers/test_log_handlers.py index b02f16ac92786..7ef025d7f29d4 100644 --- a/providers/tests/cncf/kubernetes/log_handlers/test_log_handlers.py +++ b/providers/tests/cncf/kubernetes/log_handlers/test_log_handlers.py @@ -26,6 +26,8 @@ import pytest from kubernetes.client import models as k8s +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.config import conf_vars from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG from airflow.executors import executor_loader @@ -42,9 +44,6 @@ from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.config import conf_vars - if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/providers/tests/cncf/kubernetes/operators/test_pod.py b/providers/tests/cncf/kubernetes/operators/test_pod.py index ac50641370fed..a272a5c4603f6 100644 --- a/providers/tests/cncf/kubernetes/operators/test_pod.py +++ b/providers/tests/cncf/kubernetes/operators/test_pod.py @@ -27,6 +27,7 @@ import pytest from kubernetes.client import ApiClient, V1Pod, V1PodSecurityContext, V1PodStatus, models as k8s from kubernetes.client.exceptions import ApiException +from tests_common.test_utils import db from urllib3 import HTTPResponse from airflow.exceptions import ( @@ -51,8 +52,6 @@ from airflow.utils.session import create_session from airflow.utils.types import DagRunType -from dev.tests_common.test_utils import db - pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/providers/tests/cncf/kubernetes/operators/test_spark_kubernetes.py b/providers/tests/cncf/kubernetes/operators/test_spark_kubernetes.py index 18bce53688d4f..ea15480ba1212 100644 --- a/providers/tests/cncf/kubernetes/operators/test_spark_kubernetes.py +++ b/providers/tests/cncf/kubernetes/operators/test_spark_kubernetes.py @@ -27,6 +27,7 @@ import pytest import yaml from kubernetes.client import models as 
k8s +from tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS from airflow import DAG from airflow.models import Connection, DagRun, TaskInstance @@ -34,8 +35,6 @@ from airflow.utils import db, timezone from airflow.utils.types import DagRunType -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS - @patch("airflow.providers.cncf.kubernetes.operators.spark_kubernetes.KubernetesHook") def test_spark_kubernetes_operator(mock_kubernetes_hook, data_file): diff --git a/providers/tests/cncf/kubernetes/test_client.py b/providers/tests/cncf/kubernetes/test_client.py index 269717ffaa0f0..ce3a961eef44a 100644 --- a/providers/tests/cncf/kubernetes/test_client.py +++ b/providers/tests/cncf/kubernetes/test_client.py @@ -21,6 +21,7 @@ import pytest from kubernetes.client import Configuration +from tests_common.test_utils.config import conf_vars from urllib3.connection import HTTPConnection, HTTPSConnection from airflow.providers.cncf.kubernetes.kube_client import ( @@ -29,8 +30,6 @@ get_kube_client, ) -from dev.tests_common.test_utils.config import conf_vars - class TestClient: @mock.patch("airflow.providers.cncf.kubernetes.kube_client.config") diff --git a/providers/tests/cncf/kubernetes/test_template_rendering.py b/providers/tests/cncf/kubernetes/test_template_rendering.py index 180b7a1e2d2ef..bf67059ed11ff 100644 --- a/providers/tests/cncf/kubernetes/test_template_rendering.py +++ b/providers/tests/cncf/kubernetes/test_template_rendering.py @@ -21,6 +21,7 @@ import pytest from sqlalchemy.orm import make_transient +from tests_common.test_utils.compat import BashOperator from airflow.models.renderedtifields import RenderedTaskInstanceFields, RenderedTaskInstanceFields as RTIF from airflow.providers.cncf.kubernetes.template_rendering import get_rendered_k8s_spec, render_k8s_pod_yaml @@ -28,8 +29,6 @@ from airflow.utils.session import create_session from airflow.version import version -from dev.tests_common.test_utils.compat import BashOperator - pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] DEFAULT_DATE = timezone.datetime(2021, 9, 9) diff --git a/providers/tests/common/io/operators/test_file_transfer.py b/providers/tests/common/io/operators/test_file_transfer.py index 55a196fa8918c..4ce23de04534c 100644 --- a/providers/tests/common/io/operators/test_file_transfer.py +++ b/providers/tests/common/io/operators/test_file_transfer.py @@ -19,9 +19,9 @@ from unittest import mock -from airflow.providers.common.compat.openlineage.facet import Dataset +from tests_common.test_utils.compat import ignore_provider_compatibility_error -from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error +from airflow.providers.common.compat.openlineage.facet import Dataset with ignore_provider_compatibility_error("2.8.0", __file__): from airflow.providers.common.io.operators.file_transfer import FileTransferOperator diff --git a/providers/tests/common/io/xcom/test_backend.py b/providers/tests/common/io/xcom/test_backend.py index ae45a2f863b78..a60fd872bc851 100644 --- a/providers/tests/common/io/xcom/test_backend.py +++ b/providers/tests/common/io/xcom/test_backend.py @@ -18,9 +18,8 @@ from __future__ import annotations import pytest - -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS, ignore_provider_compatibility_error -from dev.tests_common.test_utils.db import is_db_isolation_mode +from tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS, ignore_provider_compatibility_error +from tests_common.test_utils.db import 
is_db_isolation_mode pytestmark = [ pytest.mark.db_test, @@ -35,12 +34,12 @@ with ignore_provider_compatibility_error("2.8.0", __file__): from airflow.providers.common.io.xcom.backend import XComObjectStorageBackend +from tests_common.test_utils import db +from tests_common.test_utils.config import conf_vars + from airflow.utils import timezone from airflow.utils.xcom import XCOM_RETURN_KEY -from dev.tests_common.test_utils import db -from dev.tests_common.test_utils.config import conf_vars - @pytest.fixture(autouse=True) def reset_db(): diff --git a/providers/tests/common/sql/hooks/test_dbapi.py b/providers/tests/common/sql/hooks/test_dbapi.py index 5017a5f347c82..0b4d7dc72e4b1 100644 --- a/providers/tests/common/sql/hooks/test_dbapi.py +++ b/providers/tests/common/sql/hooks/test_dbapi.py @@ -24,14 +24,13 @@ import pytest from pyodbc import Cursor +from tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG from airflow.hooks.base import BaseHook from airflow.models import Connection from airflow.providers.common.sql.hooks.sql import DbApiHook, fetch_all_handler, fetch_one_handler -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS - pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ only"), ] diff --git a/providers/tests/common/sql/hooks/test_sql.py b/providers/tests/common/sql/hooks/test_sql.py index 87673f6de62e0..9908a512f8599 100644 --- a/providers/tests/common/sql/hooks/test_sql.py +++ b/providers/tests/common/sql/hooks/test_sql.py @@ -24,6 +24,7 @@ from unittest.mock import MagicMock import pytest +from tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG from airflow.exceptions import AirflowProviderDeprecationWarning @@ -31,7 +32,6 @@ from airflow.providers.common.sql.hooks.sql import DbApiHook, fetch_all_handler from airflow.utils.session import provide_session -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS from providers.tests.common.sql.test_utils import mock_hook pytestmark = [ diff --git a/providers/tests/common/sql/hooks/test_sqlparse.py b/providers/tests/common/sql/hooks/test_sqlparse.py index b3622bd8eff21..a32f5ee20880b 100644 --- a/providers/tests/common/sql/hooks/test_sqlparse.py +++ b/providers/tests/common/sql/hooks/test_sqlparse.py @@ -17,11 +17,10 @@ from __future__ import annotations import pytest +from tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS from airflow.providers.common.sql.hooks.sql import DbApiHook -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS - pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ only"), ] diff --git a/providers/tests/common/sql/operators/test_sql.py b/providers/tests/common/sql/operators/test_sql.py index c2e4bc6f2e86f..d2a371311c1d0 100644 --- a/providers/tests/common/sql/operators/test_sql.py +++ b/providers/tests/common/sql/operators/test_sql.py @@ -22,6 +22,7 @@ from unittest.mock import MagicMock import pytest +from tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS, AIRFLOW_V_3_0_PLUS from airflow import DAG from airflow.exceptions import AirflowException @@ -44,8 +45,6 @@ from airflow.utils.session import create_session from airflow.utils.state import State -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS, AIRFLOW_V_3_0_PLUS - if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import 
DagRunTriggeredByType diff --git a/providers/tests/common/sql/operators/test_sql_execute.py b/providers/tests/common/sql/operators/test_sql_execute.py index bb42cbaba2026..f03c80866ca39 100644 --- a/providers/tests/common/sql/operators/test_sql_execute.py +++ b/providers/tests/common/sql/operators/test_sql_execute.py @@ -22,6 +22,7 @@ from unittest.mock import MagicMock import pytest +from tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS from airflow.models import Connection from airflow.providers.common.compat.openlineage.facet import ( @@ -34,8 +35,6 @@ from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator from airflow.providers.openlineage.extractors.base import OperatorLineage -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS - pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ only"), ] diff --git a/providers/tests/common/sql/sensors/test_sql.py b/providers/tests/common/sql/sensors/test_sql.py index 33e8fe6399738..c44bbf1d9d1ab 100644 --- a/providers/tests/common/sql/sensors/test_sql.py +++ b/providers/tests/common/sql/sensors/test_sql.py @@ -20,6 +20,7 @@ from unittest import mock import pytest +from tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS from airflow.exceptions import AirflowException from airflow.models.dag import DAG @@ -27,8 +28,6 @@ from airflow.providers.common.sql.sensors.sql import SqlSensor from airflow.utils.timezone import datetime -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS - pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.9.0+ only"), pytest.mark.skip_if_database_isolation_mode, diff --git a/providers/tests/common/sql/test_utils.py b/providers/tests/common/sql/test_utils.py index 19b7bcc339411..50a217c87a2cc 100644 --- a/providers/tests/common/sql/test_utils.py +++ b/providers/tests/common/sql/test_utils.py @@ -21,11 +21,10 @@ from unittest import mock import pytest +from tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS from airflow.models import Connection -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS - pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_8_PLUS, reason="Tests for Airflow 2.8.0+ only"), ] diff --git a/providers/tests/conftest.py b/providers/tests/conftest.py index 6ca7990e6054f..85ffd8f1d9078 100644 --- a/providers/tests/conftest.py +++ b/providers/tests/conftest.py @@ -22,7 +22,7 @@ import pytest -pytest_plugins = "dev.tests_common.pytest_plugin" +pytest_plugins = "tests_common.pytest_plugin" @pytest.hookimpl(tryfirst=True) diff --git a/providers/tests/databricks/plugins/test_databricks_workflow.py b/providers/tests/databricks/plugins/test_databricks_workflow.py index 695466c62d266..72d6b3f321f39 100644 --- a/providers/tests/databricks/plugins/test_databricks_workflow.py +++ b/providers/tests/databricks/plugins/test_databricks_workflow.py @@ -21,6 +21,7 @@ import pytest from flask import url_for +from tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES from airflow.exceptions import AirflowException from airflow.models.dagrun import DagRun @@ -41,8 +42,6 @@ ) from airflow.www.app import create_app -from dev.tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES - DAG_ID = "test_dag" TASK_ID = "test_task" RUN_ID = "test_run_1" diff --git a/providers/tests/edge/api_endpoints/test_rpc_api_endpoint.py b/providers/tests/edge/api_endpoints/test_rpc_api_endpoint.py index 9a9b4a7dd885f..4a185971e67a7 100644 ---
a/providers/tests/edge/api_endpoints/test_rpc_api_endpoint.py +++ b/providers/tests/edge/api_endpoints/test_rpc_api_endpoint.py @@ -21,6 +21,8 @@ from unittest import mock import pytest +from tests_common.test_utils.decorators import dont_initialize_flask_app_submodules +from tests_common.test_utils.mock_plugins import mock_plugin_manager from airflow.api_connexion.exceptions import PermissionDenied from airflow.configuration import conf @@ -41,9 +43,6 @@ from airflow.utils.state import State from airflow.www import app -from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules -from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager - # Note: Sounds a bit strange to disable internal API tests in isolation mode but... # As long as the test is modelled to run its own internal API endpoints, it conflicts # with the test setup that uses a dedicated internal API server. diff --git a/providers/tests/edge/cli/test_edge_command.py b/providers/tests/edge/cli/test_edge_command.py index af3e2c00e2998..e6e07d032baa6 100644 --- a/providers/tests/edge/cli/test_edge_command.py +++ b/providers/tests/edge/cli/test_edge_command.py @@ -23,6 +23,7 @@ import pytest import time_machine +from tests_common.test_utils.config import conf_vars from airflow.exceptions import AirflowException from airflow.providers.edge.cli.edge_command import ( @@ -34,8 +35,6 @@ from airflow.providers.edge.models.edge_worker import EdgeWorker, EdgeWorkerState from airflow.utils.state import TaskInstanceState -from dev.tests_common.test_utils.config import conf_vars - pytest.importorskip("pydantic", minversion="2.0.0") # Ignore the following error for mocking diff --git a/providers/tests/edge/plugins/test_edge_executor_plugin.py b/providers/tests/edge/plugins/test_edge_executor_plugin.py index d0c5a40770b31..bdd4847865539 100644 --- a/providers/tests/edge/plugins/test_edge_executor_plugin.py +++ b/providers/tests/edge/plugins/test_edge_executor_plugin.py @@ -19,12 +19,11 @@ import importlib import pytest +from tests_common.test_utils.config import conf_vars from airflow.plugins_manager import AirflowPlugin from airflow.providers.edge.plugins import edge_executor_plugin -from dev.tests_common.test_utils.config import conf_vars - def test_plugin_inactive(): with conf_vars({("edge", "api_enabled"): "false"}): diff --git a/providers/tests/elasticsearch/log/test_es_task_handler.py b/providers/tests/elasticsearch/log/test_es_task_handler.py index abde5daf8bf18..adb10090355b1 100644 --- a/providers/tests/elasticsearch/log/test_es_task_handler.py +++ b/providers/tests/elasticsearch/log/test_es_task_handler.py @@ -31,6 +31,8 @@ import elasticsearch import pendulum import pytest +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_dags, clear_db_runs from airflow.configuration import conf from airflow.providers.elasticsearch.log.es_response import ElasticSearchResponse @@ -44,8 +46,6 @@ from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.timezone import datetime -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs from providers.tests.elasticsearch.log.elasticmock import elasticmock from providers.tests.elasticsearch.log.elasticmock.utilities import SearchFailedException diff --git a/providers/tests/fab/auth_manager/api/auth/backend/test_basic_auth.py b/providers/tests/fab/auth_manager/api/auth/backend/test_basic_auth.py index
2c783e2046f88..65db6a654ae23 100644 --- a/providers/tests/fab/auth_manager/api/auth/backend/test_basic_auth.py +++ b/providers/tests/fab/auth_manager/api/auth/backend/test_basic_auth.py @@ -21,12 +21,11 @@ import pytest from flask import Response from flask_appbuilder.const import AUTH_LDAP +from tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS from airflow.providers.fab.auth_manager.api.auth.backend.basic_auth import requires_authentication from airflow.www import app as application -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS - pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.9.0+ only"), ] diff --git a/providers/tests/fab/auth_manager/api/auth/backend/test_kerberos_auth.py b/providers/tests/fab/auth_manager/api/auth/backend/test_kerberos_auth.py index 7d13a89e8db80..e57f34ce4b033 100644 --- a/providers/tests/fab/auth_manager/api/auth/backend/test_kerberos_auth.py +++ b/providers/tests/fab/auth_manager/api/auth/backend/test_kerberos_auth.py @@ -16,7 +16,7 @@ # under the License. from __future__ import annotations -from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error +from tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.api.auth.backend.kerberos_auth import init_app diff --git a/providers/tests/fab/auth_manager/api/auth/backend/test_session.py b/providers/tests/fab/auth_manager/api/auth/backend/test_session.py index 513e8ec2aa293..405eafe11dfc4 100644 --- a/providers/tests/fab/auth_manager/api/auth/backend/test_session.py +++ b/providers/tests/fab/auth_manager/api/auth/backend/test_session.py @@ -20,12 +20,11 @@ import pytest from flask import Response +from tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS from airflow.providers.fab.auth_manager.api.auth.backend.session import requires_authentication from airflow.www import app as application -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS - pytestmark = [ pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.9.0+ only"), ] diff --git a/providers/tests/fab/auth_manager/api_endpoints/api_connexion_utils.py b/providers/tests/fab/auth_manager/api_endpoints/api_connexion_utils.py index e4cbe93c9d62a..b208b845096b9 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/api_connexion_utils.py +++ b/providers/tests/fab/auth_manager/api_endpoints/api_connexion_utils.py @@ -18,7 +18,7 @@ from contextlib import contextmanager -from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error +from tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.security_manager.override import EXISTING_ROLES diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_asset_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_asset_endpoint.py index 5c22e5ba7ff92..104e7943f4cd1 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_asset_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_asset_endpoint.py @@ -20,14 +20,14 @@ import pytest import time_machine +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.db import clear_db_assets, clear_db_runs +from tests_common.test_utils.www import _check_last_log from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP 
from airflow.security import permissions from airflow.utils import timezone -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.db import clear_db_assets, clear_db_runs -from dev.tests_common.test_utils.www import _check_last_log from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user try: diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_auth.py b/providers/tests/fab/auth_manager/api_endpoints/test_auth.py index 630ce7050bed5..fd29120070268 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_auth.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_auth.py @@ -20,12 +20,11 @@ import pytest from flask_login import current_user - -from dev.tests_common.test_utils.api_connexion_utils import assert_401 -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_pools -from dev.tests_common.test_utils.www import client_with_login +from tests_common.test_utils.api_connexion_utils import assert_401 +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_pools +from tests_common.test_utils.www import client_with_login pytestmark = [ pytest.mark.db_test, diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_backfill_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_backfill_endpoint.py index f8015ff907ba6..4d7a5fc08776a 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_backfill_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_backfill_endpoint.py @@ -22,6 +22,13 @@ import pendulum import pytest +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.db import ( + clear_db_backfills, + clear_db_dags, + clear_db_runs, + clear_db_serialized_dags, +) from airflow.models import DagBag, DagModel from airflow.models.dag import DAG @@ -31,13 +38,6 @@ from airflow.utils import timezone from airflow.utils.session import provide_session -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.db import ( - clear_db_backfills, - clear_db_dags, - clear_db_runs, - clear_db_serialized_dags, -) from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user try: diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_cors.py b/providers/tests/fab/auth_manager/api_endpoints/test_cors.py index 8dbc4f964e61a..3741d71fb8b96 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_cors.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_cors.py @@ -19,10 +19,9 @@ from base64 import b64encode import pytest - -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_pools +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_pools pytestmark = [ pytest.mark.db_test, diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_dag_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_dag_endpoint.py index e93425591f708..f2bd83efa331c 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_dag_endpoint.py +++ 
b/providers/tests/fab/auth_manager/api_endpoints/test_dag_endpoint.py @@ -21,6 +21,9 @@ import pendulum import pytest +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags +from tests_common.test_utils.www import _check_last_log from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP from airflow.models import DagBag, DagModel @@ -29,9 +32,6 @@ from airflow.security import permissions from airflow.utils.session import provide_session -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags -from dev.tests_common.test_utils.www import _check_last_log from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py index fa09df1be74e0..f822cddf49ee0 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_dag_run_endpoint.py @@ -19,6 +19,8 @@ from datetime import timedelta import pytest +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags from airflow.models.dag import DAG, DagModel from airflow.models.dagrun import DagRun @@ -28,8 +30,6 @@ from airflow.utils.session import create_session from airflow.utils.state import DagRunState -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import ( create_user, delete_roles, diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_dag_source_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_dag_source_endpoint.py index 27e41136555ea..8cfb6c58f5320 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_dag_source_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_dag_source_endpoint.py @@ -21,12 +21,12 @@ from typing import TYPE_CHECKING import pytest +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.db import clear_db_dag_code, clear_db_dags, clear_db_serialized_dags from airflow.models import DagBag from airflow.security import permissions -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.db import clear_db_dag_code, clear_db_dags, clear_db_serialized_dags from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_dag_warning_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_dag_warning_endpoint.py index 01a2c68091ce1..ec8a72d74ee48 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_dag_warning_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_dag_warning_endpoint.py @@ -17,14 +17,14 @@ from __future__ import annotations import pytest +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.db import clear_db_dag_warnings, clear_db_dags from airflow.models.dag import DagModel from 
airflow.models.dagwarning import DagWarning from airflow.security import permissions from airflow.utils.session import create_session -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.db import clear_db_dag_warnings, clear_db_dags from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_event_log_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_event_log_endpoint.py index 4794893241f6f..7d7868582934e 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_event_log_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_event_log_endpoint.py @@ -17,13 +17,13 @@ from __future__ import annotations import pytest +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.db import clear_db_logs from airflow.models import Log from airflow.security import permissions from airflow.utils import timezone -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.db import clear_db_logs from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_import_error_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_import_error_endpoint.py index 110e8e6302553..b9b8d4ff9a421 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_import_error_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_import_error_endpoint.py @@ -17,14 +17,14 @@ from __future__ import annotations import pytest +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, ParseImportError +from tests_common.test_utils.db import clear_db_dags, clear_db_import_errors +from tests_common.test_utils.permissions import _resource_name from airflow.models.dag import DagModel from airflow.security import permissions from airflow.utils import timezone -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, ParseImportError -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_import_errors -from dev.tests_common.test_utils.permissions import _resource_name from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py index b72c6fe6612f3..986b55e5ff42e 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_role_and_permission_endpoint.py @@ -17,12 +17,12 @@ from __future__ import annotations import pytest +from tests_common.test_utils.api_connexion_utils import assert_401 +from tests_common.test_utils.compat import ignore_provider_compatibility_error from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP from airflow.security import permissions -from dev.tests_common.test_utils.api_connexion_utils import assert_401 -from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import ( create_role, create_user, diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_task_instance_endpoint.py 
b/providers/tests/fab/auth_manager/api_endpoints/test_task_instance_endpoint.py index 7d379b6f8c007..6aea6b86f7b50 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_task_instance_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_task_instance_endpoint.py @@ -20,6 +20,8 @@ import urllib import pytest +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.db import clear_db_runs, clear_db_sla_miss, clear_rendered_ti_fields from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP from airflow.models import DagRun, TaskInstance @@ -29,8 +31,6 @@ from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.db import clear_db_runs, clear_db_sla_miss, clear_rendered_ti_fields from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import ( create_user, delete_roles, diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_user_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_user_endpoint.py index c7a20cb59c320..29f84850d9ba6 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_user_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_user_endpoint.py @@ -20,15 +20,15 @@ import pytest from sqlalchemy.sql.functions import count +from tests_common.test_utils.api_connexion_utils import assert_401 +from tests_common.test_utils.compat import ignore_provider_compatibility_error +from tests_common.test_utils.config import conf_vars from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP from airflow.security import permissions from airflow.utils import timezone from airflow.utils.session import create_session -from dev.tests_common.test_utils.api_connexion_utils import assert_401 -from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error -from dev.tests_common.test_utils.config import conf_vars from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import ( create_user, delete_role, diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_variable_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_variable_endpoint.py index 802eb4824087e..038dea9e72df9 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_variable_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_variable_endpoint.py @@ -17,12 +17,12 @@ from __future__ import annotations import pytest +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.db import clear_db_variables from airflow.models import Variable from airflow.security import permissions -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.db import clear_db_variables from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ diff --git a/providers/tests/fab/auth_manager/api_endpoints/test_xcom_endpoint.py b/providers/tests/fab/auth_manager/api_endpoints/test_xcom_endpoint.py index 06e8ee4847762..f45cf58de6a95 100644 --- a/providers/tests/fab/auth_manager/api_endpoints/test_xcom_endpoint.py +++ b/providers/tests/fab/auth_manager/api_endpoints/test_xcom_endpoint.py @@ -19,6 +19,8 @@ from datetime import timedelta import pytest +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.db import clear_db_dags, clear_db_runs, 
clear_db_xcom from airflow.models.dag import DagModel from airflow.models.dagrun import DagRun @@ -30,8 +32,6 @@ from airflow.utils.session import create_session from airflow.utils.types import DagRunType -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_xcom from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ diff --git a/providers/tests/fab/auth_manager/cli_commands/test_definition.py b/providers/tests/fab/auth_manager/cli_commands/test_definition.py index de906bef1ba30..572cbee05e3db 100644 --- a/providers/tests/fab/auth_manager/cli_commands/test_definition.py +++ b/providers/tests/fab/auth_manager/cli_commands/test_definition.py @@ -16,7 +16,7 @@ # under the License. from __future__ import annotations -from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error +from tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.cli_commands.definition import ( diff --git a/providers/tests/fab/auth_manager/cli_commands/test_role_command.py b/providers/tests/fab/auth_manager/cli_commands/test_role_command.py index fd176702c7219..d07cfc61242fe 100644 --- a/providers/tests/fab/auth_manager/cli_commands/test_role_command.py +++ b/providers/tests/fab/auth_manager/cli_commands/test_role_command.py @@ -23,12 +23,11 @@ from typing import TYPE_CHECKING import pytest +from tests_common.test_utils.compat import ignore_provider_compatibility_error +from tests_common.test_utils.config import conf_vars from airflow.cli import cli_parser -from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error -from dev.tests_common.test_utils.config import conf_vars - with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.cli_commands import role_command from airflow.providers.fab.auth_manager.cli_commands.utils import get_application_builder diff --git a/providers/tests/fab/auth_manager/cli_commands/test_sync_perm_command.py b/providers/tests/fab/auth_manager/cli_commands/test_sync_perm_command.py index e0e4a70f4ac88..a0345909dc2df 100644 --- a/providers/tests/fab/auth_manager/cli_commands/test_sync_perm_command.py +++ b/providers/tests/fab/auth_manager/cli_commands/test_sync_perm_command.py @@ -20,11 +20,10 @@ from unittest import mock import pytest +from tests_common.test_utils.compat import ignore_provider_compatibility_error from airflow.cli import cli_parser -from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error - with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.cli_commands import sync_perm_command diff --git a/providers/tests/fab/auth_manager/cli_commands/test_user_command.py b/providers/tests/fab/auth_manager/cli_commands/test_user_command.py index 5f2f66b0866a2..6ccd4c99716ab 100644 --- a/providers/tests/fab/auth_manager/cli_commands/test_user_command.py +++ b/providers/tests/fab/auth_manager/cli_commands/test_user_command.py @@ -24,11 +24,10 @@ from io import StringIO import pytest +from tests_common.test_utils.compat import ignore_provider_compatibility_error from airflow.cli import cli_parser -from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error - with ignore_provider_compatibility_error("2.9.0+", __file__): from 
airflow.providers.fab.auth_manager.cli_commands import user_command from airflow.providers.fab.auth_manager.cli_commands.utils import get_application_builder diff --git a/providers/tests/fab/auth_manager/cli_commands/test_utils.py b/providers/tests/fab/auth_manager/cli_commands/test_utils.py index f52defae6bb8f..193317249ed11 100644 --- a/providers/tests/fab/auth_manager/cli_commands/test_utils.py +++ b/providers/tests/fab/auth_manager/cli_commands/test_utils.py @@ -17,8 +17,7 @@ from __future__ import annotations import pytest - -from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error +from tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.cli_commands.utils import get_application_builder diff --git a/providers/tests/fab/auth_manager/conftest.py b/providers/tests/fab/auth_manager/conftest.py index 9102f5d0f65d4..5818ecf435db6 100644 --- a/providers/tests/fab/auth_manager/conftest.py +++ b/providers/tests/fab/auth_manager/conftest.py @@ -17,12 +17,11 @@ from __future__ import annotations import pytest +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.decorators import dont_initialize_flask_app_submodules from airflow.www import app -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules - @pytest.fixture(scope="session") def minimal_app_for_auth_api(): diff --git a/providers/tests/fab/auth_manager/decorators/test_auth.py b/providers/tests/fab/auth_manager/decorators/test_auth.py index 202f0d6227073..d6f60bf42f1fb 100644 --- a/providers/tests/fab/auth_manager/decorators/test_auth.py +++ b/providers/tests/fab/auth_manager/decorators/test_auth.py @@ -19,11 +19,10 @@ from unittest.mock import Mock, patch import pytest +from tests_common.test_utils.compat import ignore_provider_compatibility_error from airflow.security.permissions import ACTION_CAN_READ, RESOURCE_DAG -from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error - permissions = [(ACTION_CAN_READ, RESOURCE_DAG)] with ignore_provider_compatibility_error("2.9.0+", __file__): diff --git a/providers/tests/fab/auth_manager/models/test_anonymous_user.py b/providers/tests/fab/auth_manager/models/test_anonymous_user.py index 419d17aa9f3f9..eaf6b357f9264 100644 --- a/providers/tests/fab/auth_manager/models/test_anonymous_user.py +++ b/providers/tests/fab/auth_manager/models/test_anonymous_user.py @@ -17,7 +17,7 @@ # under the License. 
from __future__ import annotations -from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error +from tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.models.anonymous_user import AnonymousUser diff --git a/providers/tests/fab/auth_manager/schemas/test_user_schema.py b/providers/tests/fab/auth_manager/schemas/test_user_schema.py index 648372d3baebd..9f8dbd3742fa8 100644 --- a/providers/tests/fab/auth_manager/schemas/test_user_schema.py +++ b/providers/tests/fab/auth_manager/schemas/test_user_schema.py @@ -17,10 +17,10 @@ from __future__ import annotations import pytest +from tests_common.test_utils.compat import ignore_provider_compatibility_error from airflow.utils import timezone -from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_role, delete_role with ignore_provider_compatibility_error("2.9.0+", __file__): diff --git a/providers/tests/fab/auth_manager/security_manager/test_constants.py b/providers/tests/fab/auth_manager/security_manager/test_constants.py index a6566fd987109..dbe592c59d747 100644 --- a/providers/tests/fab/auth_manager/security_manager/test_constants.py +++ b/providers/tests/fab/auth_manager/security_manager/test_constants.py @@ -16,7 +16,7 @@ # under the License. from __future__ import annotations -from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error +from tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.security_manager.constants import EXISTING_ROLES diff --git a/providers/tests/fab/auth_manager/security_manager/test_override.py b/providers/tests/fab/auth_manager/security_manager/test_override.py index 2733b688d1ae5..6ba1ccda292cd 100644 --- a/providers/tests/fab/auth_manager/security_manager/test_override.py +++ b/providers/tests/fab/auth_manager/security_manager/test_override.py @@ -19,7 +19,7 @@ from unittest import mock from unittest.mock import Mock -from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error +from tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.security_manager.override import FabAirflowSecurityManagerOverride diff --git a/providers/tests/fab/auth_manager/test_fab_auth_manager.py b/providers/tests/fab/auth_manager/test_fab_auth_manager.py index 064c8e0dfd4c7..0d111f0453ad4 100644 --- a/providers/tests/fab/auth_manager/test_fab_auth_manager.py +++ b/providers/tests/fab/auth_manager/test_fab_auth_manager.py @@ -31,7 +31,7 @@ except ImportError: pass -from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error +from tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.fab_auth_manager import FabAuthManager diff --git a/providers/tests/fab/auth_manager/test_models.py b/providers/tests/fab/auth_manager/test_models.py index 6f03be373187b..76b69c3dea734 100644 --- a/providers/tests/fab/auth_manager/test_models.py +++ b/providers/tests/fab/auth_manager/test_models.py @@ -19,8 +19,7 @@ from unittest import mock from sqlalchemy import Column, 
MetaData, String, Table - -from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error +from tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.models import ( diff --git a/providers/tests/fab/auth_manager/test_security.py b/providers/tests/fab/auth_manager/test_security.py index 8c9e221b71df0..8156999f01615 100644 --- a/providers/tests/fab/auth_manager/test_security.py +++ b/providers/tests/fab/auth_manager/test_security.py @@ -31,19 +31,23 @@ from flask_appbuilder import SQLA, Model, expose, has_access from flask_appbuilder.views import BaseView, ModelView from sqlalchemy import Column, Date, Float, Integer, String +from tests_common.test_utils.compat import ignore_provider_compatibility_error from airflow.configuration import initialize_config from airflow.exceptions import AirflowException from airflow.models import DagModel from airflow.models.dag import DAG -from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error - with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.fab_auth_manager import FabAuthManager from airflow.providers.fab.auth_manager.models import assoc_permission_role from airflow.providers.fab.auth_manager.models.anonymous_user import AnonymousUser +from tests_common.test_utils.asserts import assert_queries_count +from tests_common.test_utils.db import clear_db_dags, clear_db_runs +from tests_common.test_utils.mock_security_manager import MockSecurityManager +from tests_common.test_utils.permissions import _resource_name + from airflow.security import permissions from airflow.security.permissions import ACTION_CAN_READ from airflow.www import app as application @@ -51,10 +55,6 @@ from airflow.www.extensions.init_auth_manager import get_auth_manager from airflow.www.utils import CustomSQLAInterface -from dev.tests_common.test_utils.asserts import assert_queries_count -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs -from dev.tests_common.test_utils.mock_security_manager import MockSecurityManager -from dev.tests_common.test_utils.permissions import _resource_name from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import ( create_user, create_user_scope, diff --git a/providers/tests/fab/auth_manager/views/test_permissions.py b/providers/tests/fab/auth_manager/views/test_permissions.py index 2ac26ffe45e87..eeb005b47e69a 100644 --- a/providers/tests/fab/auth_manager/views/test_permissions.py +++ b/providers/tests/fab/auth_manager/views/test_permissions.py @@ -18,12 +18,12 @@ from __future__ import annotations import pytest +from tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS +from tests_common.test_utils.www import client_with_login from airflow.security import permissions from airflow.www import app as application -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS -from dev.tests_common.test_utils.www import client_with_login from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ diff --git a/providers/tests/fab/auth_manager/views/test_roles_list.py b/providers/tests/fab/auth_manager/views/test_roles_list.py index 3c1509d75350a..f94907c3e0504 100644 --- a/providers/tests/fab/auth_manager/views/test_roles_list.py +++ b/providers/tests/fab/auth_manager/views/test_roles_list.py @@ -18,12 +18,12 @@ from __future__ import 
annotations import pytest +from tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS +from tests_common.test_utils.www import client_with_login from airflow.security import permissions from airflow.www import app as application -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS -from dev.tests_common.test_utils.www import client_with_login from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ diff --git a/providers/tests/fab/auth_manager/views/test_user.py b/providers/tests/fab/auth_manager/views/test_user.py index ee068f7f319fe..140d7cd331255 100644 --- a/providers/tests/fab/auth_manager/views/test_user.py +++ b/providers/tests/fab/auth_manager/views/test_user.py @@ -18,12 +18,12 @@ from __future__ import annotations import pytest +from tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS +from tests_common.test_utils.www import client_with_login from airflow.security import permissions from airflow.www import app as application -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS -from dev.tests_common.test_utils.www import client_with_login from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ diff --git a/providers/tests/fab/auth_manager/views/test_user_edit.py b/providers/tests/fab/auth_manager/views/test_user_edit.py index 7cdc1a493b007..fb994dbd66ec7 100644 --- a/providers/tests/fab/auth_manager/views/test_user_edit.py +++ b/providers/tests/fab/auth_manager/views/test_user_edit.py @@ -18,12 +18,12 @@ from __future__ import annotations import pytest +from tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS +from tests_common.test_utils.www import client_with_login from airflow.security import permissions from airflow.www import app as application -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS -from dev.tests_common.test_utils.www import client_with_login from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ diff --git a/providers/tests/fab/auth_manager/views/test_user_stats.py b/providers/tests/fab/auth_manager/views/test_user_stats.py index e50bc87535a48..bb7d627fdaa64 100644 --- a/providers/tests/fab/auth_manager/views/test_user_stats.py +++ b/providers/tests/fab/auth_manager/views/test_user_stats.py @@ -18,12 +18,12 @@ from __future__ import annotations import pytest +from tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS +from tests_common.test_utils.www import client_with_login from airflow.security import permissions from airflow.www import app as application -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS -from dev.tests_common.test_utils.www import client_with_login from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user, delete_user pytestmark = [ diff --git a/providers/tests/google/cloud/hooks/test_bigquery_system.py b/providers/tests/google/cloud/hooks/test_bigquery_system.py index 676ab35035839..454ab63079794 100644 --- a/providers/tests/google/cloud/hooks/test_bigquery_system.py +++ b/providers/tests/google/cloud/hooks/test_bigquery_system.py @@ -20,10 +20,10 @@ from __future__ import annotations import pytest +from tests_common.test_utils.gcp_system_helpers import GoogleSystemTest from airflow.providers.google.cloud.hooks import bigquery as hook -from dev.tests_common.test_utils.gcp_system_helpers import GoogleSystemTest from 
providers.tests.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY diff --git a/providers/tests/google/cloud/hooks/test_kms_system.py b/providers/tests/google/cloud/hooks/test_kms_system.py index 374fabb5246dd..36c5bfa207f3a 100644 --- a/providers/tests/google/cloud/hooks/test_kms_system.py +++ b/providers/tests/google/cloud/hooks/test_kms_system.py @@ -21,10 +21,10 @@ from tempfile import TemporaryDirectory import pytest +from tests_common.test_utils.gcp_system_helpers import GoogleSystemTest, provide_gcp_context from airflow.providers.google.cloud.hooks.kms import CloudKMSHook -from dev.tests_common.test_utils.gcp_system_helpers import GoogleSystemTest, provide_gcp_context from providers.tests.google.cloud.utils.gcp_authenticator import GCP_KMS_KEY # To prevent resource name collisions, key ring and key resources CANNOT be deleted, so diff --git a/providers/tests/google/cloud/hooks/test_secret_manager_system.py b/providers/tests/google/cloud/hooks/test_secret_manager_system.py index d13305f52b42e..9f512f5f23c8f 100644 --- a/providers/tests/google/cloud/hooks/test_secret_manager_system.py +++ b/providers/tests/google/cloud/hooks/test_secret_manager_system.py @@ -19,10 +19,10 @@ import os import pytest +from tests_common.test_utils.gcp_system_helpers import GoogleSystemTest, provide_gcp_context from airflow.providers.google.cloud.hooks.secret_manager import SecretsManagerHook -from dev.tests_common.test_utils.gcp_system_helpers import GoogleSystemTest, provide_gcp_context from providers.tests.google.cloud.utils.gcp_authenticator import GCP_SECRET_MANAGER_KEY TEST_SECRET_ID = os.environ.get("GCP_SECRET_MANAGER_SECRET_ID", "test-secret") diff --git a/providers/tests/google/cloud/log/test_gcs_task_handler.py b/providers/tests/google/cloud/log/test_gcs_task_handler.py index 9f27c618de29d..369f4b888058b 100644 --- a/providers/tests/google/cloud/log/test_gcs_task_handler.py +++ b/providers/tests/google/cloud/log/test_gcs_task_handler.py @@ -23,14 +23,13 @@ from unittest.mock import MagicMock import pytest +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_dags, clear_db_runs from airflow.providers.google.cloud.log.gcs_task_handler import GCSTaskHandler from airflow.utils.state import TaskInstanceState from airflow.utils.timezone import datetime -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs - @pytest.mark.db_test class TestGCSTaskHandler: diff --git a/providers/tests/google/cloud/log/test_gcs_task_handler_system.py b/providers/tests/google/cloud/log/test_gcs_task_handler_system.py index eb1ce08d14f5c..e3e2bd2cafccc 100644 --- a/providers/tests/google/cloud/log/test_gcs_task_handler_system.py +++ b/providers/tests/google/cloud/log/test_gcs_task_handler_system.py @@ -23,6 +23,13 @@ from unittest import mock import pytest +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_connections, clear_db_runs +from tests_common.test_utils.gcp_system_helpers import ( + GoogleSystemTest, + provide_gcp_context, + resolve_full_gcp_key_path, +) from airflow import settings from airflow.example_dags import example_complex @@ -30,13 +37,6 @@ from airflow.utils.log.log_reader import TaskLogReader from airflow.utils.session import provide_session -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_connections, clear_db_runs -from 
dev.tests_common.test_utils.gcp_system_helpers import ( - GoogleSystemTest, - provide_gcp_context, - resolve_full_gcp_key_path, -) from providers.tests.google.cloud.utils.gcp_authenticator import GCP_GCS_KEY diff --git a/providers/tests/google/cloud/log/test_stackdriver_task_handler.py b/providers/tests/google/cloud/log/test_stackdriver_task_handler.py index 783f1a34b762c..60752f3b0d3f3 100644 --- a/providers/tests/google/cloud/log/test_stackdriver_task_handler.py +++ b/providers/tests/google/cloud/log/test_stackdriver_task_handler.py @@ -24,16 +24,15 @@ import pytest from google.cloud.logging import Resource from google.cloud.logging_v2.types import ListLogEntriesRequest, ListLogEntriesResponse, LogEntry +from tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_dags, clear_db_runs from airflow.exceptions import RemovedInAirflow3Warning from airflow.providers.google.cloud.log.stackdriver_task_handler import StackdriverTaskHandler from airflow.utils import timezone from airflow.utils.state import TaskInstanceState -from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs - def _create_list_log_entries_response_mock(messages, token): return ListLogEntriesResponse( diff --git a/providers/tests/google/cloud/log/test_stackdriver_task_handler_system.py b/providers/tests/google/cloud/log/test_stackdriver_task_handler_system.py index a53dd43d08feb..da01e9c770570 100644 --- a/providers/tests/google/cloud/log/test_stackdriver_task_handler_system.py +++ b/providers/tests/google/cloud/log/test_stackdriver_task_handler_system.py @@ -23,6 +23,13 @@ from unittest import mock import pytest +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_runs +from tests_common.test_utils.gcp_system_helpers import ( + GoogleSystemTest, + provide_gcp_context, + resolve_full_gcp_key_path, +) from airflow import settings from airflow.example_dags import example_complex @@ -30,13 +37,6 @@ from airflow.utils.log.log_reader import TaskLogReader from airflow.utils.session import provide_session -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_runs -from dev.tests_common.test_utils.gcp_system_helpers import ( - GoogleSystemTest, - provide_gcp_context, - resolve_full_gcp_key_path, -) from providers.tests.google.cloud.utils.gcp_authenticator import GCP_STACKDRIVER diff --git a/providers/tests/google/cloud/operators/test_bigquery.py b/providers/tests/google/cloud/operators/test_bigquery.py index 3836d737662fc..1edc67c45a761 100644 --- a/providers/tests/google/cloud/operators/test_bigquery.py +++ b/providers/tests/google/cloud/operators/test_bigquery.py @@ -27,6 +27,12 @@ import pytest from google.cloud.bigquery import DEFAULT_RETRY, ScalarQueryParameter from google.cloud.exceptions import Conflict +from tests_common.test_utils.db import ( + clear_db_dags, + clear_db_runs, + clear_db_serialized_dags, + clear_db_xcom, +) from airflow.exceptions import ( AirflowException, @@ -75,13 +81,6 @@ from airflow.serialization.serialized_objects import SerializedDAG from airflow.utils.timezone import datetime -from dev.tests_common.test_utils.db import ( - clear_db_dags, - clear_db_runs, - clear_db_serialized_dags, - clear_db_xcom, -) - pytestmark = pytest.mark.db_test diff --git 
a/providers/tests/google/cloud/operators/test_bigquery_dts.py b/providers/tests/google/cloud/operators/test_bigquery_dts.py
index f50c42805b49d..b196237b5bf84 100644
--- a/providers/tests/google/cloud/operators/test_bigquery_dts.py
+++ b/providers/tests/google/cloud/operators/test_bigquery_dts.py
@@ -21,6 +21,7 @@
 from google.api_core.gapic_v1.method import DEFAULT
 from google.cloud.bigquery_datatransfer_v1 import StartManualTransferRunsResponse, TransferConfig, TransferRun
+from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
 
 from airflow.providers.google.cloud.operators.bigquery_dts import (
     BigQueryCreateDataTransferOperator,
@@ -28,8 +29,6 @@
     BigQueryDeleteDataTransferConfigOperator,
 )
 
-from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
-
 PROJECT_ID = "id"
diff --git a/providers/tests/google/cloud/operators/test_dataprep_system.py b/providers/tests/google/cloud/operators/test_dataprep_system.py
index dad77ac4ff806..9706859f02577 100644
--- a/providers/tests/google/cloud/operators/test_dataprep_system.py
+++ b/providers/tests/google/cloud/operators/test_dataprep_system.py
@@ -21,14 +21,13 @@
 import os
 
 import pytest
+from tests_common.test_utils.db import clear_db_connections
+from tests_common.test_utils.gcp_system_helpers import GoogleSystemTest
+from tests_common.test_utils.system_tests import get_test_run
 
 from airflow.models import Connection
 from airflow.utils.session import create_session
 
-from dev.tests_common.test_utils.db import clear_db_connections
-from dev.tests_common.test_utils.gcp_system_helpers import GoogleSystemTest
-from dev.tests_common.test_utils.system_tests import get_test_run
-
 TOKEN = os.environ.get("DATAPREP_TOKEN")
 EXTRA = {"token": TOKEN}
diff --git a/providers/tests/google/cloud/operators/test_dataproc.py b/providers/tests/google/cloud/operators/test_dataproc.py
index cf4bffa3a0929..a8b747d564a6f 100644
--- a/providers/tests/google/cloud/operators/test_dataproc.py
+++ b/providers/tests/google/cloud/operators/test_dataproc.py
@@ -27,6 +27,8 @@
 from google.api_core.retry_async import AsyncRetry
 from google.cloud import dataproc
 from google.cloud.dataproc_v1 import Batch, Cluster, JobStatus
+from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, AIRFLOW_VERSION
+from tests_common.test_utils.db import clear_db_runs, clear_db_xcom
 
 from airflow.exceptions import (
     AirflowException,
@@ -80,9 +82,6 @@
 from airflow.serialization.serialized_objects import SerializedDAG
 from airflow.utils.timezone import datetime
 
-from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, AIRFLOW_VERSION
-from dev.tests_common.test_utils.db import clear_db_runs, clear_db_xcom
-
 AIRFLOW_VERSION_LABEL = "v" + str(AIRFLOW_VERSION).replace(".", "-").replace("+", "-")
 
 cluster_params = inspect.signature(ClusterGenerator.__init__).parameters
diff --git a/providers/tests/google/cloud/operators/test_datastore_system.py b/providers/tests/google/cloud/operators/test_datastore_system.py
index 8807288358bae..3a9e82241146c 100644
--- a/providers/tests/google/cloud/operators/test_datastore_system.py
+++ b/providers/tests/google/cloud/operators/test_datastore_system.py
@@ -20,12 +20,12 @@
 import os
 
 import pytest
-
-from dev.tests_common.test_utils.gcp_system_helpers import (
+from tests_common.test_utils.gcp_system_helpers import (
     CLOUD_DAG_FOLDER,
     GoogleSystemTest,
     provide_gcp_context,
 )
+
 from providers.tests.google.cloud.utils.gcp_authenticator import GCP_DATASTORE_KEY
 
 BUCKET = os.environ.get("GCP_DATASTORE_BUCKET", "datastore-system-test")
diff --git a/providers/tests/google/cloud/operators/test_looker.py b/providers/tests/google/cloud/operators/test_looker.py
index b368259c503e0..faefc0eca5a08 100644
--- a/providers/tests/google/cloud/operators/test_looker.py
+++ b/providers/tests/google/cloud/operators/test_looker.py
@@ -20,14 +20,13 @@
 from unittest.mock import MagicMock
 
 import pytest
+from tests_common.test_utils.db import clear_db_runs, clear_db_xcom
 
 from airflow.exceptions import AirflowException
 from airflow.models import DAG, DagBag
 from airflow.providers.google.cloud.operators.looker import LookerStartPdtBuildOperator
 from airflow.utils.timezone import datetime
 
-from dev.tests_common.test_utils.db import clear_db_runs, clear_db_xcom
-
 OPERATOR_PATH = "airflow.providers.google.cloud.operators.looker.{}"
 
 TASK_ID = "task-id"
diff --git a/providers/tests/google/cloud/secrets/test_secret_manager_system.py b/providers/tests/google/cloud/secrets/test_secret_manager_system.py
index b9b8b6d0cac82..04b7a9fe1f56f 100644
--- a/providers/tests/google/cloud/secrets/test_secret_manager_system.py
+++ b/providers/tests/google/cloud/secrets/test_secret_manager_system.py
@@ -22,8 +22,8 @@
 from unittest import mock
 
 import pytest
+from tests_common.test_utils.gcp_system_helpers import GoogleSystemTest, provide_gcp_context
 
-from dev.tests_common.test_utils.gcp_system_helpers import GoogleSystemTest, provide_gcp_context
 from providers.tests.google.cloud.utils.gcp_authenticator import GCP_SECRET_MANAGER_KEY
 
 BACKEND_IMPORT_PATH = "airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend"
diff --git a/providers/tests/google/cloud/transfers/test_facebook_ads_to_gcs_system.py b/providers/tests/google/cloud/transfers/test_facebook_ads_to_gcs_system.py
index 9cb0a9be5264e..4942a36648c21 100644
--- a/providers/tests/google/cloud/transfers/test_facebook_ads_to_gcs_system.py
+++ b/providers/tests/google/cloud/transfers/test_facebook_ads_to_gcs_system.py
@@ -22,17 +22,17 @@
 from contextlib import contextmanager
 
 import pytest
+from tests_common.test_utils.gcp_system_helpers import (
+    GoogleSystemTest,
+    provide_gcp_context,
+)
+from tests_common.test_utils.system_tests import get_test_run
 
 from airflow.exceptions import AirflowException
 from airflow.models import Connection
 from airflow.providers.google.cloud.example_dags import example_facebook_ads_to_gcs
 from airflow.utils.process_utils import patch_environ
 
-from dev.tests_common.test_utils.gcp_system_helpers import (
-    GoogleSystemTest,
-    provide_gcp_context,
-)
-from dev.tests_common.test_utils.system_tests import get_test_run
 from providers.tests.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY
 
 CREDENTIALS_DIR = os.environ.get("CREDENTIALS_DIR", "/files/airflow-breeze-config/keys")
diff --git a/providers/tests/google/cloud/transfers/test_salesforce_to_gcs_system.py b/providers/tests/google/cloud/transfers/test_salesforce_to_gcs_system.py
index d556f2d86e2f2..e525ca1adde21 100644
--- a/providers/tests/google/cloud/transfers/test_salesforce_to_gcs_system.py
+++ b/providers/tests/google/cloud/transfers/test_salesforce_to_gcs_system.py
@@ -19,15 +19,15 @@
 import os
 
 import pytest
-
-from airflow.providers.google.cloud.example_dags import example_salesforce_to_gcs
-
-from dev.tests_common.test_utils.gcp_system_helpers import (
+from tests_common.test_utils.gcp_system_helpers import (
     GoogleSystemTest,
     provide_gcp_context,
 )
-from dev.tests_common.test_utils.salesforce_system_helpers import provide_salesforce_connection
-from dev.tests_common.test_utils.system_tests import get_test_run
+from tests_common.test_utils.salesforce_system_helpers import provide_salesforce_connection
+from tests_common.test_utils.system_tests import get_test_run
+
+from airflow.providers.google.cloud.example_dags import example_salesforce_to_gcs
+
 from providers.tests.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY
 
 CREDENTIALS_DIR = os.environ.get("CREDENTIALS_DIR", "/files/airflow-breeze-config/keys")
diff --git a/providers/tests/google/cloud/utils/gcp_authenticator.py b/providers/tests/google/cloud/utils/gcp_authenticator.py
index 6bb11b260644c..36bca98e9a6a7 100644
--- a/providers/tests/google/cloud/utils/gcp_authenticator.py
+++ b/providers/tests/google/cloud/utils/gcp_authenticator.py
@@ -21,13 +21,13 @@
 import os
 import subprocess
 
+from tests_common.test_utils import AIRFLOW_MAIN_FOLDER
+from tests_common.test_utils.logging_command_executor import CommandExecutor
+
 from airflow import settings
 from airflow.exceptions import AirflowException
 from airflow.models import Connection
 
-from dev.tests_common.test_utils import AIRFLOW_MAIN_FOLDER
-from dev.tests_common.test_utils.logging_command_executor import CommandExecutor
-
 # Please keep these variables in alphabetical order.
 GCP_AI_KEY = "gcp_ai.json"
diff --git a/providers/tests/google/common/auth_backend/test_google_openid.py b/providers/tests/google/common/auth_backend/test_google_openid.py
index 67b0ff2003d22..02f08e6107070 100644
--- a/providers/tests/google/common/auth_backend/test_google_openid.py
+++ b/providers/tests/google/common/auth_backend/test_google_openid.py
@@ -20,14 +20,13 @@
 
 import pytest
 from google.auth.exceptions import GoogleAuthError
+from tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS
+from tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.db import clear_db_pools
+from tests_common.test_utils.decorators import dont_initialize_flask_app_submodules
 
 from airflow.www.app import create_app
 
-from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS
-from dev.tests_common.test_utils.config import conf_vars
-from dev.tests_common.test_utils.db import clear_db_pools
-from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules
-
 
 @pytest.fixture(scope="module")
 def google_openid_app():
diff --git a/providers/tests/google/marketing_platform/operators/test_display_video_system.py b/providers/tests/google/marketing_platform/operators/test_display_video_system.py
index 49f44948abfdf..57e93a0544a15 100644
--- a/providers/tests/google/marketing_platform/operators/test_display_video_system.py
+++ b/providers/tests/google/marketing_platform/operators/test_display_video_system.py
@@ -17,6 +17,12 @@
 from __future__ import annotations
 
 import pytest
+from tests_common.test_utils.gcp_system_helpers import (
+    MARKETING_DAG_FOLDER,
+    GoogleSystemTest,
+    provide_gcp_context,
+)
+from tests_common.test_utils.system_tests import get_test_run
 
 from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook
 from airflow.providers.google.marketing_platform.example_dags.example_display_video import (
@@ -25,12 +31,6 @@
     dag_example_display_video_sdf,
 )
 
-from dev.tests_common.test_utils.gcp_system_helpers import (
-    MARKETING_DAG_FOLDER,
-    GoogleSystemTest,
-    provide_gcp_context,
-)
-from dev.tests_common.test_utils.system_tests import get_test_run
 from providers.tests.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY, GMP_KEY
 
 # Requires the following scope:
diff --git a/providers/tests/hashicorp/hooks/test_vault.py b/providers/tests/hashicorp/hooks/test_vault.py
index 442a656d20064..8aa555567bb4c 100644
--- a/providers/tests/hashicorp/hooks/test_vault.py
+++ b/providers/tests/hashicorp/hooks/test_vault.py
@@ -22,13 +22,12 @@
 
 import pytest
 from hvac.exceptions import VaultError
+from tests_common.test_utils.config import conf_vars
 
 from airflow.configuration import AirflowConfigParser
 from airflow.exceptions import AirflowConfigException
 from airflow.providers.hashicorp.hooks.vault import VaultHook
 
-from dev.tests_common.test_utils.config import conf_vars
-
 
 class TestVaultHook:
     @staticmethod
diff --git a/providers/tests/imap/hooks/test_imap.py b/providers/tests/imap/hooks/test_imap.py
index 2971cfd1924c6..9d2fcad5c74b3 100644
--- a/providers/tests/imap/hooks/test_imap.py
+++ b/providers/tests/imap/hooks/test_imap.py
@@ -22,14 +22,13 @@
 from unittest.mock import Mock, mock_open, patch
 
 import pytest
+from tests_common.test_utils.config import conf_vars
 
 from airflow.exceptions import AirflowException
 from airflow.models import Connection
 from airflow.providers.imap.hooks.imap import ImapHook
 from airflow.utils import db
 
-from dev.tests_common.test_utils.config import conf_vars
-
 pytestmark = pytest.mark.db_test
diff --git a/providers/tests/microsoft/azure/log/test_wasb_task_handler.py b/providers/tests/microsoft/azure/log/test_wasb_task_handler.py
index 224961efe4336..2200a65f4d0d7 100644
--- a/providers/tests/microsoft/azure/log/test_wasb_task_handler.py
+++ b/providers/tests/microsoft/azure/log/test_wasb_task_handler.py
@@ -24,15 +24,14 @@
 
 import pytest
 from azure.common import AzureHttpError
+from tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.db import clear_db_dags, clear_db_runs
 
 from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
 from airflow.providers.microsoft.azure.log.wasb_task_handler import WasbTaskHandler
 from airflow.utils.state import TaskInstanceState
 from airflow.utils.timezone import datetime
 
-from dev.tests_common.test_utils.config import conf_vars
-from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs
-
 pytestmark = pytest.mark.db_test
diff --git a/providers/tests/mongo/hooks/test_mongo.py b/providers/tests/mongo/hooks/test_mongo.py
index 78756b0cc4890..9cd5b4f0677d5 100644
--- a/providers/tests/mongo/hooks/test_mongo.py
+++ b/providers/tests/mongo/hooks/test_mongo.py
@@ -23,13 +23,12 @@
 
 import pymongo
 import pytest
+from tests_common.test_utils.compat import connection_as_json
 
 from airflow.exceptions import AirflowConfigException, AirflowProviderDeprecationWarning
 from airflow.models import Connection
 from airflow.providers.mongo.hooks.mongo import MongoHook
 
-from dev.tests_common.test_utils.compat import connection_as_json
-
 pytestmark = pytest.mark.db_test
 
 if TYPE_CHECKING:
diff --git a/providers/tests/mysql/hooks/test_mysql.py b/providers/tests/mysql/hooks/test_mysql.py
index 23748ae9cac19..49358ac30dcff 100644
--- a/providers/tests/mysql/hooks/test_mysql.py
+++ b/providers/tests/mysql/hooks/test_mysql.py
@@ -36,9 +36,9 @@
     pytest.skip("MySQL not available", allow_module_level=True)
 
 
-from airflow.utils import timezone
+from tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces
 
-from dev.tests_common.test_utils.asserts import assert_equal_ignore_multiple_spaces
+from airflow.utils import timezone
 
 SSL_DICT = {"cert": "/tmp/client-cert.pem", "ca": "/tmp/server-ca.pem", "key": "/tmp/client-key.pem"}
diff --git a/providers/tests/openlineage/extractors/test_bash.py b/providers/tests/openlineage/extractors/test_bash.py
index d4fcdf7af2b7a..8782ce161c637 100644
--- a/providers/tests/openlineage/extractors/test_bash.py
+++ b/providers/tests/openlineage/extractors/test_bash.py
@@ -23,13 +23,12 @@
 
 import pytest
 from openlineage.client.facet_v2 import source_code_job
+from tests_common.test_utils.compat import BashOperator
 
 from airflow import DAG
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.providers.openlineage.extractors.bash import BashExtractor
 
-from dev.tests_common.test_utils.compat import BashOperator
-
 pytestmark = pytest.mark.db_test
 
 with DAG(
diff --git a/providers/tests/openlineage/extractors/test_manager.py b/providers/tests/openlineage/extractors/test_manager.py
index 6bbf303e32857..dbd4a8e40368c 100644
--- a/providers/tests/openlineage/extractors/test_manager.py
+++ b/providers/tests/openlineage/extractors/test_manager.py
@@ -24,6 +24,7 @@
 import pytest
 from openlineage.client.event_v2 import Dataset as OpenLineageDataset
 from openlineage.client.facet_v2 import documentation_dataset, ownership_dataset, schema_dataset
+from tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS
 
 from airflow.io.path import ObjectStoragePath
 from airflow.lineage.entities import Column, File, Table, User
@@ -35,8 +36,6 @@
 from airflow.providers.openlineage.utils.utils import Asset
 from airflow.utils.state import State
 
-from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS
-
 if TYPE_CHECKING:
     from airflow.utils.context import Context
diff --git a/providers/tests/openlineage/extractors/test_python.py b/providers/tests/openlineage/extractors/test_python.py
index ef4fc0b861e7c..92a9d073a1109 100644
--- a/providers/tests/openlineage/extractors/test_python.py
+++ b/providers/tests/openlineage/extractors/test_python.py
@@ -25,14 +25,13 @@
 
 import pytest
 from openlineage.client.facet_v2 import source_code_job
+from tests_common.test_utils.compat import BashOperator
 
 from airflow import DAG
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.operators.python import PythonOperator
 from airflow.providers.openlineage.extractors.python import PythonExtractor
 
-from dev.tests_common.test_utils.compat import BashOperator
-
 pytestmark = pytest.mark.db_test
 
 dag = DAG(
diff --git a/providers/tests/openlineage/plugins/test_adapter.py b/providers/tests/openlineage/plugins/test_adapter.py
index 88f2250c638ef..8d57fad296279 100644
--- a/providers/tests/openlineage/plugins/test_adapter.py
+++ b/providers/tests/openlineage/plugins/test_adapter.py
@@ -36,6 +36,8 @@
     processing_engine_run,
     sql_job,
 )
+from tests_common.test_utils.compat import BashOperator
+from tests_common.test_utils.config import conf_vars
 
 from airflow import DAG
 from airflow.models.dagrun import DagRun, DagRunState
@@ -52,9 +54,6 @@
 from airflow.providers.openlineage.utils.utils import get_airflow_job_facet
 from airflow.utils.task_group import TaskGroup
 
-from dev.tests_common.test_utils.compat import BashOperator
-from dev.tests_common.test_utils.config import conf_vars
-
 pytestmark = pytest.mark.db_test
diff --git a/providers/tests/openlineage/plugins/test_execution.py b/providers/tests/openlineage/plugins/test_execution.py
index c308047b1bc0d..09262adc4a6f9 100644
--- a/providers/tests/openlineage/plugins/test_execution.py
+++ b/providers/tests/openlineage/plugins/test_execution.py
@@ -26,6 +26,8 @@
 from unittest import mock
 
 import pytest
+from tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS
+from tests_common.test_utils.config import conf_vars
 
 from airflow.jobs.job import Job
 from airflow.jobs.local_task_job_runner import LocalTaskJobRunner
@@ -37,9 +39,6 @@
 from airflow.utils import timezone
 from airflow.utils.state import State
 
-from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS
-from dev.tests_common.test_utils.config import conf_vars
-
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/providers/tests/openlineage/plugins/test_listener.py b/providers/tests/openlineage/plugins/test_listener.py
index 0c1651ccf0277..b859160cf8424 100644
--- a/providers/tests/openlineage/plugins/test_listener.py
+++ b/providers/tests/openlineage/plugins/test_listener.py
@@ -29,6 +29,8 @@
 from openlineage.client import OpenLineageClient
 from openlineage.client.transport import ConsoleTransport
 from openlineage.client.transport.console import ConsoleConfig
+from tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS
+from tests_common.test_utils.config import conf_vars
 
 from airflow.models import DAG, DagRun, TaskInstance
 from airflow.models.baseoperator import BaseOperator
@@ -39,9 +41,6 @@
 from airflow.providers.openlineage.utils.selective_enable import disable_lineage, enable_lineage
 from airflow.utils.state import DagRunState, State
 
-from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS
-from dev.tests_common.test_utils.config import conf_vars
-
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/providers/tests/openlineage/plugins/test_openlineage.py b/providers/tests/openlineage/plugins/test_openlineage.py
index 8685326a29432..e1c36001ebfeb 100644
--- a/providers/tests/openlineage/plugins/test_openlineage.py
+++ b/providers/tests/openlineage/plugins/test_openlineage.py
@@ -22,9 +22,8 @@
 from unittest.mock import patch
 
 import pytest
-
-from dev.tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES
-from dev.tests_common.test_utils.config import conf_vars
+from tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES
+from tests_common.test_utils.config import conf_vars
 
 
 @pytest.mark.skipif(
diff --git a/providers/tests/openlineage/plugins/test_utils.py b/providers/tests/openlineage/plugins/test_utils.py
index 444839ccaef30..f0685c1351685 100644
--- a/providers/tests/openlineage/plugins/test_utils.py
+++ b/providers/tests/openlineage/plugins/test_utils.py
@@ -27,6 +27,7 @@
 from attrs import define
 from openlineage.client.utils import RedactMixin
 from pkg_resources import parse_version
+from tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS, BashOperator
 
 from airflow.models import DAG as AIRFLOW_DAG, DagModel
 from airflow.providers.openlineage.plugins.facets import AirflowDebugRunFacet
@@ -44,8 +45,6 @@
 from airflow.utils.log.secrets_masker import _secrets_masker
 from airflow.utils.state import State
 
-from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS, BashOperator
-
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/providers/tests/openlineage/test_conf.py b/providers/tests/openlineage/test_conf.py
index 7f78a6a4c2eea..b1dd57d4d8415 100644
--- a/providers/tests/openlineage/test_conf.py
+++ b/providers/tests/openlineage/test_conf.py
@@ -20,6 +20,7 @@
 from unittest import mock
 
 import pytest
+from tests_common.test_utils.config import conf_vars, env_vars
 
 from airflow.exceptions import AirflowConfigException
 from airflow.providers.openlineage.conf import (
@@ -39,8 +40,6 @@
     transport,
 )
 
-from dev.tests_common.test_utils.config import conf_vars, env_vars
-
 _CONFIG_SECTION = "openlineage"
 _VAR_CONFIG_PATH = "OPENLINEAGE_CONFIG"
 _CONFIG_OPTION_CONFIG_PATH = "config_path"
diff --git a/providers/tests/openlineage/utils/test_utils.py b/providers/tests/openlineage/utils/test_utils.py
index 6cf7904546aba..5adeceb440dc1 100644
--- a/providers/tests/openlineage/utils/test_utils.py
+++ b/providers/tests/openlineage/utils/test_utils.py
@@ -21,6 +21,9 @@
 import pathlib
 from unittest.mock import MagicMock, patch
 
+from tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS, BashOperator
+from tests_common.test_utils.mock_operators import MockOperator
+
 from airflow import DAG
 from airflow.decorators import task
 from airflow.models.baseoperator import BaseOperator
@@ -44,9 +47,6 @@
 from airflow.utils.task_group import TaskGroup
 from airflow.utils.types import DagRunType
 
-from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS, BashOperator
-from dev.tests_common.test_utils.mock_operators import MockOperator
-
 BASH_OPERATOR_PATH = "airflow.providers.standard.operators.bash"
 if not AIRFLOW_V_2_10_PLUS:
     BASH_OPERATOR_PATH = "airflow.operators.bash"
@@ -191,7 +191,7 @@ def test_get_fully_qualified_class_name_mapped_operator():
     mapped = MockOperator.partial(task_id="task_2").expand(arg2=["a", "b", "c"])
     assert isinstance(mapped, MappedOperator)
     mapped_op_path = get_fully_qualified_class_name(mapped)
-    assert mapped_op_path == "dev.tests_common.test_utils.mock_operators.MockOperator"
+    assert mapped_op_path == "tests_common.test_utils.mock_operators.MockOperator"
 
 
 def test_get_fully_qualified_class_name_bash_operator():
diff --git a/providers/tests/opensearch/log/test_os_task_handler.py b/providers/tests/opensearch/log/test_os_task_handler.py
index 1ffa0f5251191..81ac75c855aa9 100644
--- a/providers/tests/opensearch/log/test_os_task_handler.py
+++ b/providers/tests/opensearch/log/test_os_task_handler.py
@@ -32,6 +32,8 @@
 opensearchpy = pytest.importorskip("opensearchpy")
 
 from opensearchpy.exceptions import NotFoundError
+from tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.db import clear_db_dags, clear_db_runs
 
 from airflow.configuration import conf
 from airflow.providers.opensearch.log.os_response import OpensearchResponse
@@ -44,8 +46,6 @@
 from airflow.utils.state import DagRunState, TaskInstanceState
 from airflow.utils.timezone import datetime
 
-from dev.tests_common.test_utils.config import conf_vars
-from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs
 from providers.tests.opensearch.conftest import MockClient
 
 pytestmark = pytest.mark.db_test
diff --git a/providers/tests/redis/log/test_redis_task_handler.py b/providers/tests/redis/log/test_redis_task_handler.py
index bc7345df9a53b..0d83f3a303136 100644
--- a/providers/tests/redis/log/test_redis_task_handler.py
+++ b/providers/tests/redis/log/test_redis_task_handler.py
@@ -21,6 +21,7 @@
 from unittest.mock import patch
 
 import pytest
+from tests_common.test_utils.config import conf_vars
 
 from airflow.models import DAG, DagRun, TaskInstance
 from airflow.operators.empty import EmptyOperator
@@ -29,8 +30,6 @@
 from airflow.utils.state import State
 from airflow.utils.timezone import datetime
 
-from dev.tests_common.test_utils.config import conf_vars
-
 pytestmark = pytest.mark.db_test
diff --git a/providers/tests/sftp/operators/test_sftp.py b/providers/tests/sftp/operators/test_sftp.py
index e6c90c2c74f5d..9d5689c881485 100644
--- a/providers/tests/sftp/operators/test_sftp.py
+++ b/providers/tests/sftp/operators/test_sftp.py
@@ -25,6 +25,7 @@
 
 import paramiko
 import pytest
+from tests_common.test_utils.config import conf_vars
 
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.models import DAG, Connection
@@ -36,8 +37,6 @@
 from airflow.utils import timezone
 from airflow.utils.timezone import datetime
 
-from dev.tests_common.test_utils.config import conf_vars
-
 pytestmark = pytest.mark.db_test
diff --git a/providers/tests/smtp/hooks/test_smtp.py b/providers/tests/smtp/hooks/test_smtp.py
index 04a20e0ca6f0a..fe9471c424933 100644
--- a/providers/tests/smtp/hooks/test_smtp.py
+++ b/providers/tests/smtp/hooks/test_smtp.py
@@ -25,14 +25,13 @@
 from unittest.mock import Mock, patch
 
 import pytest
+from tests_common.test_utils.config import conf_vars
 
 from airflow.models import Connection
 from airflow.providers.smtp.hooks.smtp import SmtpHook
 from airflow.utils import db
 from airflow.utils.session import create_session
 
-from dev.tests_common.test_utils.config import conf_vars
-
 pytestmark = pytest.mark.db_test
diff --git a/providers/tests/smtp/notifications/test_smtp.py b/providers/tests/smtp/notifications/test_smtp.py
index 75e1ebf530fae..594fa19dfee34 100644
--- a/providers/tests/smtp/notifications/test_smtp.py
+++ b/providers/tests/smtp/notifications/test_smtp.py
@@ -21,6 +21,8 @@
 from unittest import mock
 
 import pytest
+from tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS
+from tests_common.test_utils.config import conf_vars
 
 from airflow.configuration import conf
 from airflow.models import SlaMiss
@@ -32,9 +34,6 @@
 )
 from airflow.utils import timezone
 
-from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS
-from dev.tests_common.test_utils.config import conf_vars
-
 pytestmark = pytest.mark.db_test
 
 SMTP_API_DEFAULT_CONN_ID = SmtpHook.default_conn_name
diff --git a/providers/tests/ssh/operators/test_ssh.py b/providers/tests/ssh/operators/test_ssh.py
index b304a24882bb9..55431bd3b629e 100644
--- a/providers/tests/ssh/operators/test_ssh.py
+++ b/providers/tests/ssh/operators/test_ssh.py
@@ -24,6 +24,7 @@
 
 import pytest
 from paramiko.client import SSHClient
+from tests_common.test_utils.config import conf_vars
 
 from airflow.exceptions import AirflowException, AirflowSkipException, AirflowTaskTimeout
 from airflow.models import TaskInstance
@@ -32,8 +33,6 @@
 from airflow.utils.timezone import datetime
 from airflow.utils.types import NOTSET
 
-from dev.tests_common.test_utils.config import conf_vars
-
 pytestmark = pytest.mark.db_test
diff --git a/providers/tests/standard/operators/test_bash.py b/providers/tests/standard/operators/test_bash.py
index 305651ee596ae..f040442ecccd3 100644
--- a/providers/tests/standard/operators/test_bash.py
+++ b/providers/tests/standard/operators/test_bash.py
@@ -26,6 +26,7 @@
 from unittest import mock
 
 import pytest
+from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
 
 from airflow.exceptions import AirflowException, AirflowSkipException, AirflowTaskTimeout
 from airflow.providers.standard.operators.bash import BashOperator
@@ -33,8 +34,6 @@
 from airflow.utils.state import State
 from airflow.utils.types import DagRunType
 
-from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
-
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/providers/tests/standard/operators/test_datetime.py b/providers/tests/standard/operators/test_datetime.py
index 4fbb0863d35ae..da7f98f0e3543 100644
--- a/providers/tests/standard/operators/test_datetime.py
+++ b/providers/tests/standard/operators/test_datetime.py
@@ -21,6 +21,7 @@
 
 import pytest
 import time_machine
+from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
 
 from airflow.exceptions import AirflowException
 from airflow.models.dagrun import DagRun
@@ -31,8 +32,6 @@
 from airflow.utils.session import create_session
 from airflow.utils.state import State
 
-from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
-
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/providers/tests/standard/operators/test_weekday.py b/providers/tests/standard/operators/test_weekday.py
index 6c20299d15e79..576fd10b54542 100644
--- a/providers/tests/standard/operators/test_weekday.py
+++ b/providers/tests/standard/operators/test_weekday.py
@@ -21,6 +21,7 @@
 
 import pytest
 import time_machine
+from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
 
 from airflow.exceptions import AirflowException
 from airflow.models.dagrun import DagRun
@@ -33,8 +34,6 @@
 from airflow.utils.state import State
 from airflow.utils.weekday import WeekDay
 
-from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
-
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/providers/tests/standard/sensors/test_weekday.py b/providers/tests/standard/sensors/test_weekday.py
index ef0091a9fdd3a..5bdbf4bb79c81 100644
--- a/providers/tests/standard/sensors/test_weekday.py
+++ b/providers/tests/standard/sensors/test_weekday.py
@@ -20,6 +20,7 @@
 from datetime import timedelta
 
 import pytest
+from tests_common.test_utils import db
 
 from airflow.exceptions import AirflowSensorTimeout
 from airflow.models import DagBag
@@ -28,8 +29,6 @@
 from airflow.utils.timezone import datetime
 from airflow.utils.weekday import WeekDay
 
-from dev.tests_common.test_utils import db
-
 pytestmark = pytest.mark.db_test
diff --git a/providers/tests/system/airbyte/example_airbyte_trigger_job.py b/providers/tests/system/airbyte/example_airbyte_trigger_job.py
index 1dd3f01d72618..240a293bbd71b 100644
--- a/providers/tests/system/airbyte/example_airbyte_trigger_job.py
+++ b/providers/tests/system/airbyte/example_airbyte_trigger_job.py
@@ -61,7 +61,7 @@
     # Task dependency created via `XComArgs`:
     # async_source_destination >> airbyte_sensor
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/alibaba/example_adb_spark_batch.py b/providers/tests/system/alibaba/example_adb_spark_batch.py
index 35e03a088871d..e8447cefc4b58 100644
--- a/providers/tests/system/alibaba/example_adb_spark_batch.py
+++ b/providers/tests/system/alibaba/example_adb_spark_batch.py
@@ -50,14 +50,14 @@
 
     spark_pi >> spark_lr
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 # [END howto_operator_adb_spark_batch]
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/alibaba/example_adb_spark_sql.py b/providers/tests/system/alibaba/example_adb_spark_sql.py
index ac29330d451db..cc587e424b2b4 100644
--- a/providers/tests/system/alibaba/example_adb_spark_sql.py
+++ b/providers/tests/system/alibaba/example_adb_spark_sql.py
@@ -42,14 +42,14 @@
 
    show_databases >> show_tables
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 # [END howto_operator_adb_spark_sql]
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/alibaba/example_oss_bucket.py b/providers/tests/system/alibaba/example_oss_bucket.py
index 4870a3b577977..cfe7a42efc55d 100644
--- a/providers/tests/system/alibaba/example_oss_bucket.py
+++ b/providers/tests/system/alibaba/example_oss_bucket.py
@@ -41,14 +41,14 @@
 
    create_bucket >> delete_bucket
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 # [END howto_operator_oss_bucket]
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/alibaba/example_oss_object.py b/providers/tests/system/alibaba/example_oss_object.py
index 7305c05bf8384..dfa59917fd6b0 100644
--- a/providers/tests/system/alibaba/example_oss_object.py
+++ b/providers/tests/system/alibaba/example_oss_object.py
@@ -65,14 +65,14 @@
 
    create_object >> download_object >> delete_object >> delete_batch_object
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/CONTRIBUTING.md b/providers/tests/system/amazon/CONTRIBUTING.md
index f12062aa7fb59..4abdf410dd63d 100644
--- a/providers/tests/system/amazon/CONTRIBUTING.md
+++ b/providers/tests/system/amazon/CONTRIBUTING.md
@@ -212,7 +212,7 @@
 chain(
     task2,  # task2 has trigger rule "all done" defined
 )
-from dev.tests_common.test_utils.watcher import watcher
+from tests_common.test_utils.watcher import watcher
 
 # This test needs watcher in order to properly mark success/failure
 # when "tearDown" task with trigger rule is part of the DAG
diff --git a/providers/tests/system/amazon/aws/example_appflow.py b/providers/tests/system/amazon/aws/example_appflow.py
index db9dd9a428947..262b6cb803d44 100644
--- a/providers/tests/system/amazon/aws/example_appflow.py
+++ b/providers/tests/system/amazon/aws/example_appflow.py
@@ -111,13 +111,13 @@
         should_be_skipped,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_appflow_run.py b/providers/tests/system/amazon/aws/example_appflow_run.py
index d1151d1dd5c8b..106fda1bb7a42 100644
--- a/providers/tests/system/amazon/aws/example_appflow_run.py
+++ b/providers/tests/system/amazon/aws/example_appflow_run.py
@@ -193,13 +193,13 @@ def delete_flow(flow_name: str):
         delete_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_athena.py b/providers/tests/system/amazon/aws/example_athena.py
index 2ee1c11ab7881..201e017d12a64 100644
--- a/providers/tests/system/amazon/aws/example_athena.py
+++ b/providers/tests/system/amazon/aws/example_athena.py
@@ -174,14 +174,14 @@ def read_results_from_s3(bucket_name, query_execution_id):
         delete_s3_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_azure_blob_to_s3.py b/providers/tests/system/amazon/aws/example_azure_blob_to_s3.py
index 78a55a6a7f7dc..3834365622d30 100644
--- a/providers/tests/system/amazon/aws/example_azure_blob_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_azure_blob_to_s3.py
@@ -72,13 +72,13 @@
         delete_s3_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_batch.py b/providers/tests/system/amazon/aws/example_batch.py
index da035050a77b4..828815a2e1c86 100644
--- a/providers/tests/system/amazon/aws/example_batch.py
+++ b/providers/tests/system/amazon/aws/example_batch.py
@@ -283,13 +283,13 @@ def delete_job_queue(job_queue_name):
         log_cleanup,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_bedrock.py b/providers/tests/system/amazon/aws/example_bedrock.py
index 8e67ee4d3880b..3caa9f3a35398 100644
--- a/providers/tests/system/amazon/aws/example_bedrock.py
+++ b/providers/tests/system/amazon/aws/example_bedrock.py
@@ -218,14 +218,14 @@ def run_or_skip():
         delete_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py b/providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py
index 0d9eb278460f8..094cb854c3a4f 100644
--- a/providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py
+++ b/providers/tests/system/amazon/aws/example_bedrock_retrieve_and_generate.py
@@ -594,14 +594,14 @@ def delete_opensearch_policies(collection_name: str):
         delete_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_cloudformation.py b/providers/tests/system/amazon/aws/example_cloudformation.py
index 38827eebbdb5f..7553f5d9bd0a3 100644
--- a/providers/tests/system/amazon/aws/example_cloudformation.py
+++ b/providers/tests/system/amazon/aws/example_cloudformation.py
@@ -108,14 +108,14 @@
         wait_for_stack_delete,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_comprehend.py b/providers/tests/system/amazon/aws/example_comprehend.py
index d8a1f9af88b6c..a4e3bdd4852ef 100644
--- a/providers/tests/system/amazon/aws/example_comprehend.py
+++ b/providers/tests/system/amazon/aws/example_comprehend.py
@@ -126,13 +126,13 @@ def pii_entities_detection_job_workflow():
         delete_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_comprehend_document_classifier.py b/providers/tests/system/amazon/aws/example_comprehend_document_classifier.py
index f5308265a83ca..160e3f3cadf84 100644
--- a/providers/tests/system/amazon/aws/example_comprehend_document_classifier.py
+++ b/providers/tests/system/amazon/aws/example_comprehend_document_classifier.py
@@ -231,13 +231,13 @@ def delete_connection(conn_id):
         delete_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_datasync.py b/providers/tests/system/amazon/aws/example_datasync.py
index a8363fe81b36e..8d25fcf9c3989 100644
--- a/providers/tests/system/amazon/aws/example_datasync.py
+++ b/providers/tests/system/amazon/aws/example_datasync.py
@@ -235,13 +235,13 @@ def delete_locations(locations):
         delete_s3_bucket_destination,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_dms.py b/providers/tests/system/amazon/aws/example_dms.py
index b2e99daf6290f..5d15aa0c4ecb0 100644
--- a/providers/tests/system/amazon/aws/example_dms.py
+++ b/providers/tests/system/amazon/aws/example_dms.py
@@ -429,13 +429,13 @@ def delete_security_group(security_group_id: str, security_group_name: str):
         delete_s3_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_dynamodb.py b/providers/tests/system/amazon/aws/example_dynamodb.py
index 3d709c36b02ad..3cfa15ecaa36e 100644
--- a/providers/tests/system/amazon/aws/example_dynamodb.py
+++ b/providers/tests/system/amazon/aws/example_dynamodb.py
@@ -117,14 +117,14 @@ def delete_table(table_name: str):
         delete_table,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_dynamodb_to_s3.py b/providers/tests/system/amazon/aws/example_dynamodb_to_s3.py
index 86a5d76c9587f..989fe79b2d9f6 100644
--- a/providers/tests/system/amazon/aws/example_dynamodb_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_dynamodb_to_s3.py
@@ -256,13 +256,13 @@ def skip_incremental_export(start_time: datetime, end_time: datetime):
         delete_table,
         delete_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_ec2.py b/providers/tests/system/amazon/aws/example_ec2.py
index e55adab775ffd..750ec19c27779 100644
--- a/providers/tests/system/amazon/aws/example_ec2.py
+++ b/providers/tests/system/amazon/aws/example_ec2.py
@@ -202,13 +202,13 @@ def parse_response(instance_ids: list):
         delete_key_pair(key_name),
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_ecs.py b/providers/tests/system/amazon/aws/example_ecs.py
index e1aad99b5e7cb..0f9118ae34af3 100644
--- a/providers/tests/system/amazon/aws/example_ecs.py
+++ b/providers/tests/system/amazon/aws/example_ecs.py
@@ -212,14 +212,14 @@ def clean_logs(group_name: str):
         clean_logs(log_group_name),
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_ecs_fargate.py b/providers/tests/system/amazon/aws/example_ecs_fargate.py
index ca592234aa77f..df3afbce7df71 100644
--- a/providers/tests/system/amazon/aws/example_ecs_fargate.py
+++ b/providers/tests/system/amazon/aws/example_ecs_fargate.py
@@ -156,13 +156,13 @@ def delete_cluster(cluster_name: str) -> None:
         delete_cluster(cluster_name),
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_eks_templated.py b/providers/tests/system/amazon/aws/example_eks_templated.py
index 3b1ba5059725a..de4d5e621ef0a 100644
--- a/providers/tests/system/amazon/aws/example_eks_templated.py
+++ b/providers/tests/system/amazon/aws/example_eks_templated.py
@@ -148,13 +148,13 @@
         await_delete_cluster,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py b/providers/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py
index 27ee2be0d728a..a267463c11f3d 100644
--- a/providers/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py
+++ b/providers/tests/system/amazon/aws/example_eks_with_fargate_in_one_step.py
@@ -141,14 +141,14 @@
         await_delete_cluster,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py b/providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py
index 34cbed1645fa5..1348b352cc51f 100644
--- a/providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py
+++ b/providers/tests/system/amazon/aws/example_eks_with_fargate_profile.py
@@ -175,13 +175,13 @@
         await_delete_cluster,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py b/providers/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py
index 11f843688524c..07538d94d17b3 100644
--- a/providers/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py
+++ b/providers/tests/system/amazon/aws/example_eks_with_nodegroup_in_one_step.py
@@ -155,14 +155,14 @@ def delete_launch_template(template_name: str):
         delete_launch_template(launch_template_name),
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_eks_with_nodegroups.py b/providers/tests/system/amazon/aws/example_eks_with_nodegroups.py
index 8356a8ee4375c..98448767d9745 100644
--- a/providers/tests/system/amazon/aws/example_eks_with_nodegroups.py
+++ b/providers/tests/system/amazon/aws/example_eks_with_nodegroups.py
@@ -198,14 +198,14 @@ def delete_launch_template(template_name: str):
         delete_launch_template(launch_template_name),
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_emr.py b/providers/tests/system/amazon/aws/example_emr.py
index 44c28630eb69e..9a29667b5d098 100644
--- a/providers/tests/system/amazon/aws/example_emr.py
+++ b/providers/tests/system/amazon/aws/example_emr.py
@@ -227,14 +227,14 @@ def get_step_id(step_ids: list):
         ]
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_emr_eks.py b/providers/tests/system/amazon/aws/example_emr_eks.py
index d95645fd306a6..cafaf09ed7473 100644
--- a/providers/tests/system/amazon/aws/example_emr_eks.py
+++ b/providers/tests/system/amazon/aws/example_emr_eks.py
@@ -320,13 +320,13 @@ def delete_virtual_cluster(virtual_cluster_id):
         delete_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_emr_notebook_execution.py b/providers/tests/system/amazon/aws/example_emr_notebook_execution.py
index 48d2b14c96573..681e97048aa58 100644
--- a/providers/tests/system/amazon/aws/example_emr_notebook_execution.py
+++ b/providers/tests/system/amazon/aws/example_emr_notebook_execution.py
@@ -112,13 +112,13 @@
         wait_for_execution_finish,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_emr_serverless.py b/providers/tests/system/amazon/aws/example_emr_serverless.py
index 4901fa6736059..9059f0e1b1e02 100644
--- a/providers/tests/system/amazon/aws/example_emr_serverless.py
+++ b/providers/tests/system/amazon/aws/example_emr_serverless.py
@@ -155,13 +155,13 @@
         delete_s3_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_eventbridge.py b/providers/tests/system/amazon/aws/example_eventbridge.py
index 5470bd0ca70cc..6f31fc9ce37d8 100644
--- a/providers/tests/system/amazon/aws/example_eventbridge.py
+++ b/providers/tests/system/amazon/aws/example_eventbridge.py
@@ -81,7 +81,7 @@
 
    chain(test_context, put_events, put_rule, enable_rule, disable_rule)
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_ftp_to_s3.py b/providers/tests/system/amazon/aws/example_ftp_to_s3.py
index 98a37e197f8fe..b3e0d2ccf43f5 100644
--- a/providers/tests/system/amazon/aws/example_ftp_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_ftp_to_s3.py
@@ -72,13 +72,13 @@
         delete_s3_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_gcs_to_s3.py b/providers/tests/system/amazon/aws/example_gcs_to_s3.py
index 97ed8dfda3a98..5257b440808e9 100644
--- a/providers/tests/system/amazon/aws/example_gcs_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_gcs_to_s3.py
@@ -115,13 +115,13 @@ def upload_gcs_file(bucket_name: str, object_name: str, user_project: str):
         delete_gcs_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_glacier_to_gcs.py b/providers/tests/system/amazon/aws/example_glacier_to_gcs.py
index 28c061ae057c9..9281d3c92e1a0 100644
--- a/providers/tests/system/amazon/aws/example_glacier_to_gcs.py
+++ b/providers/tests/system/amazon/aws/example_glacier_to_gcs.py
@@ -108,13 +108,13 @@ def delete_vault(vault_name):
         delete_vault(vault_name),
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_glue.py b/providers/tests/system/amazon/aws/example_glue.py
index 5314d26eff80c..3d042c79d44cb 100644
--- a/providers/tests/system/amazon/aws/example_glue.py
+++ b/providers/tests/system/amazon/aws/example_glue.py
@@ -217,13 +217,13 @@ def glue_cleanup(crawler_name: str, job_name: str, db_name: str) -> None:
         log_cleanup,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_glue_data_quality.py b/providers/tests/system/amazon/aws/example_glue_data_quality.py
index 4d05d6e22b436..3c1bdb378b5dc 100644
--- a/providers/tests/system/amazon/aws/example_glue_data_quality.py
+++ b/providers/tests/system/amazon/aws/example_glue_data_quality.py
@@ -198,13 +198,13 @@ def delete_ruleset(ruleset_name):
         delete_s3_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py b/providers/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py
index fdb3cb8e63ad8..a0dd4ebba7b56 100644
--- a/providers/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py
+++ b/providers/tests/system/amazon/aws/example_glue_data_quality_with_recommendation.py
@@ -197,13 +197,13 @@ def delete_ruleset(ruleset_name):
         delete_s3_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_glue_databrew.py b/providers/tests/system/amazon/aws/example_glue_databrew.py
index 3218b465c290e..95799666f46e4 100644
--- a/providers/tests/system/amazon/aws/example_glue_databrew.py
+++ b/providers/tests/system/amazon/aws/example_glue_databrew.py
@@ -155,14 +155,14 @@ def delete_job(job_name: str):
         delete_output_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_google_api_sheets_to_s3.py b/providers/tests/system/amazon/aws/example_google_api_sheets_to_s3.py
index 7ea7ae9d072f7..24a4eba54d59b 100644
--- a/providers/tests/system/amazon/aws/example_google_api_sheets_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_google_api_sheets_to_s3.py
@@ -83,13 +83,13 @@
         delete_s3_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_google_api_youtube_to_s3.py b/providers/tests/system/amazon/aws/example_google_api_youtube_to_s3.py
index ae4f721567cec..3d5ff917bfc0a 100644
--- a/providers/tests/system/amazon/aws/example_google_api_youtube_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_google_api_youtube_to_s3.py
@@ -197,13 +197,13 @@ def transform_video_ids(**kwargs):
         delete_s3_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_hive_to_dynamodb.py b/providers/tests/system/amazon/aws/example_hive_to_dynamodb.py
index 5582c13b855f5..0c0978361bab7 100644
--- a/providers/tests/system/amazon/aws/example_hive_to_dynamodb.py
+++ b/providers/tests/system/amazon/aws/example_hive_to_dynamodb.py
@@ -150,13 +150,13 @@ def configure_hive_connection(connection_id, hostname):
         delete_dynamodb_table(dynamodb_table_name),
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_http_to_s3.py b/providers/tests/system/amazon/aws/example_http_to_s3.py
index cfbc131ee5689..612d68ef36e0c 100644
--- a/providers/tests/system/amazon/aws/example_http_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_http_to_s3.py
@@ -119,13 +119,13 @@ def create_connection(conn_id_name: str):
         delete_s3_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_imap_attachment_to_s3.py b/providers/tests/system/amazon/aws/example_imap_attachment_to_s3.py
index b43023115cf4d..c376778b0df3a 100644
--- a/providers/tests/system/amazon/aws/example_imap_attachment_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_imap_attachment_to_s3.py
@@ -90,13 +90,13 @@
         delete_s3_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_kinesis_analytics.py b/providers/tests/system/amazon/aws/example_kinesis_analytics.py
index 600710cc43b4b..b4d4566fc7600 100644
--- a/providers/tests/system/amazon/aws/example_kinesis_analytics.py
+++ b/providers/tests/system/amazon/aws/example_kinesis_analytics.py
@@ -263,13 +263,13 @@ def delete_kinesis_stream(stream: str, region: str):
         delete_bucket,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_lambda.py b/providers/tests/system/amazon/aws/example_lambda.py
index fd346a34589f0..767fae86387fd 100644
--- a/providers/tests/system/amazon/aws/example_lambda.py
+++ b/providers/tests/system/amazon/aws/example_lambda.py
@@ -129,13 +129,13 @@ def delete_lambda(function_name: str):
         log_cleanup,
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_local_to_s3.py b/providers/tests/system/amazon/aws/example_local_to_s3.py
index 47344c429b958..cdd9748974433 100644
--- a/providers/tests/system/amazon/aws/example_local_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_local_to_s3.py
@@ -91,14 +91,14 @@ def delete_temp_file():
         delete_temp_file(),
     )
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()
 
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/amazon/aws/example_mongo_to_s3.py b/providers/tests/system/amazon/aws/example_mongo_to_s3.py
index 3e5bc318e5c17..6fa5f612b0681 100644
--- a/providers/tests/system/amazon/aws/example_mongo_to_s3.py
+++ b/providers/tests/system/amazon/aws/example_mongo_to_s3.py
@@ -83,13 +83,13 @@
         delete_s3_bucket,
     )
 
-    from
dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_neptune.py b/providers/tests/system/amazon/aws/example_neptune.py index 4685840b42594..7682a65b65d6d 100644 --- a/providers/tests/system/amazon/aws/example_neptune.py +++ b/providers/tests/system/amazon/aws/example_neptune.py @@ -77,13 +77,13 @@ def delete_cluster(cluster_id): # TEST TEARDOWN delete_cluster(cluster_id), ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_quicksight.py b/providers/tests/system/amazon/aws/example_quicksight.py index ec5cd38003148..eb53ffe60e012 100644 --- a/providers/tests/system/amazon/aws/example_quicksight.py +++ b/providers/tests/system/amazon/aws/example_quicksight.py @@ -218,13 +218,13 @@ def delete_ingestion(aws_account_id: str, dataset_name: str, ingestion_name: str delete_bucket, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_rds_event.py b/providers/tests/system/amazon/aws/example_rds_event.py index 79d70c9d394e2..58ab603326b96 100644 --- a/providers/tests/system/amazon/aws/example_rds_event.py +++ b/providers/tests/system/amazon/aws/example_rds_event.py @@ -119,14 +119,14 @@ def delete_sns_topic(topic_arn) -> None: delete_sns_topic(sns_topic), ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_rds_export.py b/providers/tests/system/amazon/aws/example_rds_export.py index 34638f030f506..756105d802740 100644 --- 
a/providers/tests/system/amazon/aws/example_rds_export.py +++ b/providers/tests/system/amazon/aws/example_rds_export.py @@ -178,13 +178,13 @@ def get_snapshot_arn(snapshot_name: str) -> str: delete_db_instance, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_rds_instance.py b/providers/tests/system/amazon/aws/example_rds_instance.py index ce49f73c1f4ae..b2a9e7482e7d0 100644 --- a/providers/tests/system/amazon/aws/example_rds_instance.py +++ b/providers/tests/system/amazon/aws/example_rds_instance.py @@ -111,14 +111,14 @@ delete_db_instance, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_rds_snapshot.py b/providers/tests/system/amazon/aws/example_rds_snapshot.py index c58c1db54d539..b6b0c468e589b 100644 --- a/providers/tests/system/amazon/aws/example_rds_snapshot.py +++ b/providers/tests/system/amazon/aws/example_rds_snapshot.py @@ -138,14 +138,14 @@ delete_db_instance, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_redshift.py b/providers/tests/system/amazon/aws/example_redshift.py index 986bce5a07b52..146a20da451ba 100644 --- a/providers/tests/system/amazon/aws/example_redshift.py +++ b/providers/tests/system/amazon/aws/example_redshift.py @@ -248,13 +248,13 @@ delete_cluster_snapshot, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_redshift_s3_transfers.py 
b/providers/tests/system/amazon/aws/example_redshift_s3_transfers.py index 2a553eae8cb60..536f9b97157dc 100644 --- a/providers/tests/system/amazon/aws/example_redshift_s3_transfers.py +++ b/providers/tests/system/amazon/aws/example_redshift_s3_transfers.py @@ -318,13 +318,13 @@ def _insert_data(table_name: str) -> str: delete_cluster, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_s3.py b/providers/tests/system/amazon/aws/example_s3.py index d016076022874..a9c93fdd5bebe 100644 --- a/providers/tests/system/amazon/aws/example_s3.py +++ b/providers/tests/system/amazon/aws/example_s3.py @@ -323,13 +323,13 @@ def check_fn(files: list, **kwargs) -> bool: delete_bucket, delete_bucket_2, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_s3_to_dynamodb.py b/providers/tests/system/amazon/aws/example_s3_to_dynamodb.py index efd762cfd93c1..4bc4fa6b9393e 100644 --- a/providers/tests/system/amazon/aws/example_s3_to_dynamodb.py +++ b/providers/tests/system/amazon/aws/example_s3_to_dynamodb.py @@ -181,13 +181,13 @@ def delete_dynamodb_table(table_name: str): delete_new_table, delete_bucket, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_s3_to_ftp.py b/providers/tests/system/amazon/aws/example_s3_to_ftp.py index 0ffb4722ba867..4dd134f95c3f8 100644 --- a/providers/tests/system/amazon/aws/example_s3_to_ftp.py +++ b/providers/tests/system/amazon/aws/example_s3_to_ftp.py @@ -71,13 +71,13 @@ delete_s3_bucket, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: 
tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_s3_to_sftp.py b/providers/tests/system/amazon/aws/example_s3_to_sftp.py index 5270d3ec7d4b9..dc7d010628679 100644 --- a/providers/tests/system/amazon/aws/example_s3_to_sftp.py +++ b/providers/tests/system/amazon/aws/example_s3_to_sftp.py @@ -71,13 +71,13 @@ delete_s3_bucket, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_s3_to_sql.py b/providers/tests/system/amazon/aws/example_s3_to_sql.py index e7e8a5e9543cf..d7f452bfeb087 100644 --- a/providers/tests/system/amazon/aws/example_s3_to_sql.py +++ b/providers/tests/system/amazon/aws/example_s3_to_sql.py @@ -18,6 +18,8 @@ from datetime import datetime +from tests_common.test_utils.watcher import watcher + from airflow import settings from airflow.decorators import task from airflow.models import Connection @@ -40,7 +42,6 @@ from airflow.providers.common.sql.operators.sql import SQLTableCheckOperator from airflow.utils.trigger_rule import TriggerRule -from dev.tests_common.test_utils.watcher import watcher from providers.tests.system.amazon.aws.utils import ENV_ID_KEY, SystemTestContextBuilder # Externally fetched variables: @@ -254,7 +255,7 @@ def parse_csv_to_generator(filepath): list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_sagemaker.py b/providers/tests/system/amazon/aws/example_sagemaker.py index acb05400b951e..96e9756659975 100644 --- a/providers/tests/system/amazon/aws/example_sagemaker.py +++ b/providers/tests/system/amazon/aws/example_sagemaker.py @@ -646,13 +646,13 @@ def delete_docker_image(image_name): log_cleanup, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_sagemaker_endpoint.py b/providers/tests/system/amazon/aws/example_sagemaker_endpoint.py index fecaa92264b6e..165e5d5edd444 100644 --- a/providers/tests/system/amazon/aws/example_sagemaker_endpoint.py +++ b/providers/tests/system/amazon/aws/example_sagemaker_endpoint.py @@ -291,13 +291,13 @@ def set_up(env_id, role_arn, ti=None): archive_logs(f"/aws/sagemaker/Endpoints/{test_setup['endpoint_name']}"), ) - from dev.tests_common.test_utils.watcher import watcher + 
from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_sagemaker_notebook.py b/providers/tests/system/amazon/aws/example_sagemaker_notebook.py index 76d35dc2b1479..e989be7a92e5f 100644 --- a/providers/tests/system/amazon/aws/example_sagemaker_notebook.py +++ b/providers/tests/system/amazon/aws/example_sagemaker_notebook.py @@ -97,13 +97,13 @@ delete_instance, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_sagemaker_pipeline.py b/providers/tests/system/amazon/aws/example_sagemaker_pipeline.py index 614fb5c13cc9e..9c0c9d7284837 100644 --- a/providers/tests/system/amazon/aws/example_sagemaker_pipeline.py +++ b/providers/tests/system/amazon/aws/example_sagemaker_pipeline.py @@ -117,13 +117,13 @@ def delete_pipeline(name: str): delete_experiments([pipeline_name]), ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_salesforce_to_s3.py b/providers/tests/system/amazon/aws/example_salesforce_to_s3.py index 7ee15f7030beb..a533c8c3fbbf3 100644 --- a/providers/tests/system/amazon/aws/example_salesforce_to_s3.py +++ b/providers/tests/system/amazon/aws/example_salesforce_to_s3.py @@ -78,13 +78,13 @@ delete_s3_bucket, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_sftp_to_s3.py b/providers/tests/system/amazon/aws/example_sftp_to_s3.py index 85dac988cb82e..765a68c9946f5 100644 --- a/providers/tests/system/amazon/aws/example_sftp_to_s3.py +++ b/providers/tests/system/amazon/aws/example_sftp_to_s3.py @@ -71,13 
+71,13 @@ delete_s3_bucket, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_sns.py b/providers/tests/system/amazon/aws/example_sns.py index d00227536da0b..06756a244ae9d 100644 --- a/providers/tests/system/amazon/aws/example_sns.py +++ b/providers/tests/system/amazon/aws/example_sns.py @@ -75,14 +75,14 @@ def delete_topic(topic_arn) -> None: delete_topic(create_sns_topic), ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_sql_to_s3.py b/providers/tests/system/amazon/aws/example_sql_to_s3.py index dd333073a0431..4cb8a2e7b9be2 100644 --- a/providers/tests/system/amazon/aws/example_sql_to_s3.py +++ b/providers/tests/system/amazon/aws/example_sql_to_s3.py @@ -199,14 +199,14 @@ def create_connection(conn_id_name: str, cluster_id: str): delete_cluster, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_sqs.py b/providers/tests/system/amazon/aws/example_sqs.py index 2aba6a580c4d7..f14cf3b7a713c 100644 --- a/providers/tests/system/amazon/aws/example_sqs.py +++ b/providers/tests/system/amazon/aws/example_sqs.py @@ -100,14 +100,14 @@ def delete_queue(queue_url): delete_queue(sqs_queue), ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/example_step_functions.py b/providers/tests/system/amazon/aws/example_step_functions.py index beeb12528c7ac..eb3a399c69a93 100644 --- a/providers/tests/system/amazon/aws/example_step_functions.py +++ 
b/providers/tests/system/amazon/aws/example_step_functions.py @@ -112,13 +112,13 @@ def delete_state_machine(state_machine_arn): delete_state_machine(state_machine_arn), ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/amazon/aws/tests/test_aws_auth_manager.py b/providers/tests/system/amazon/aws/tests/test_aws_auth_manager.py index 774aec21b2b6b..2656474cbbf51 100644 --- a/providers/tests/system/amazon/aws/tests/test_aws_auth_manager.py +++ b/providers/tests/system/amazon/aws/tests/test_aws_auth_manager.py @@ -21,11 +21,11 @@ import boto3 import pytest +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.www import check_content_in_response from airflow.www import app as application -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.www import check_content_in_response from providers.tests.system.amazon.aws.utils import set_env_id SAML_METADATA_URL = "/saml/metadata" diff --git a/providers/tests/system/apache/beam/example_beam.py b/providers/tests/system/apache/beam/example_beam.py index 4166caa3477a4..b90971149bac8 100644 --- a/providers/tests/system/apache/beam/example_beam.py +++ b/providers/tests/system/apache/beam/example_beam.py @@ -61,7 +61,7 @@ # [END howto_operator_start_java_direct_runner_pipeline] -from dev.tests_common.test_utils.system_tests import get_test_run +from tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/apache/beam/example_beam_java_flink.py b/providers/tests/system/apache/beam/example_beam_java_flink.py index a68637d8ef8d9..8f639b0829f8d 100644 --- a/providers/tests/system/apache/beam/example_beam_java_flink.py +++ b/providers/tests/system/apache/beam/example_beam_java_flink.py @@ -60,7 +60,7 @@ jar_to_local_flink_runner >> start_java_pipeline_flink_runner -from dev.tests_common.test_utils.system_tests import get_test_run +from tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/apache/beam/example_beam_java_spark.py b/providers/tests/system/apache/beam/example_beam_java_spark.py index eb3fea6d16dab..b45ce5bbba642 100644 --- a/providers/tests/system/apache/beam/example_beam_java_spark.py +++ b/providers/tests/system/apache/beam/example_beam_java_spark.py @@ -60,7 +60,7 @@ jar_to_local_spark_runner >> start_java_pipeline_spark_runner -from dev.tests_common.test_utils.system_tests import get_test_run +from tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/apache/beam/example_go.py b/providers/tests/system/apache/beam/example_go.py index 792eee088bf6d..ecdd8a20ed370 100644 --- 
a/providers/tests/system/apache/beam/example_go.py +++ b/providers/tests/system/apache/beam/example_go.py @@ -103,7 +103,7 @@ ) -from dev.tests_common.test_utils.system_tests import get_test_run +from tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/apache/beam/example_go_dataflow.py b/providers/tests/system/apache/beam/example_go_dataflow.py index 35a63cb56e1e7..e1904ea13050b 100644 --- a/providers/tests/system/apache/beam/example_go_dataflow.py +++ b/providers/tests/system/apache/beam/example_go_dataflow.py @@ -76,7 +76,7 @@ # [END howto_operator_start_go_dataflow_runner_pipeline_async_gcs_file] -from dev.tests_common.test_utils.system_tests import get_test_run +from tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/apache/beam/example_java_dataflow.py b/providers/tests/system/apache/beam/example_java_dataflow.py index 334142dfd315c..d01d52fe658eb 100644 --- a/providers/tests/system/apache/beam/example_java_dataflow.py +++ b/providers/tests/system/apache/beam/example_java_dataflow.py @@ -66,7 +66,7 @@ # [END howto_operator_start_java_dataflow_runner_pipeline] -from dev.tests_common.test_utils.system_tests import get_test_run +from tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/apache/beam/example_python.py b/providers/tests/system/apache/beam/example_python.py index d685cb33d2dad..4b4b9c2c05b36 100644 --- a/providers/tests/system/apache/beam/example_python.py +++ b/providers/tests/system/apache/beam/example_python.py @@ -120,7 +120,7 @@ ) -from dev.tests_common.test_utils.system_tests import get_test_run +from tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/apache/beam/example_python_async.py b/providers/tests/system/apache/beam/example_python_async.py index 8465278b6bf76..0419a13cced23 100644 --- a/providers/tests/system/apache/beam/example_python_async.py +++ b/providers/tests/system/apache/beam/example_python_async.py @@ -129,7 +129,7 @@ ) -from dev.tests_common.test_utils.system_tests import get_test_run +from tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/apache/beam/example_python_dataflow.py b/providers/tests/system/apache/beam/example_python_dataflow.py index 36d6b3b8562e3..8d1295ed29a0e 100644 --- a/providers/tests/system/apache/beam/example_python_dataflow.py +++ b/providers/tests/system/apache/beam/example_python_dataflow.py @@ -79,7 +79,7 @@ # [END howto_operator_start_python_dataflow_runner_pipeline_async_gcs_file] -from dev.tests_common.test_utils.system_tests import get_test_run +from tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/apache/cassandra/example_cassandra_dag.py 
b/providers/tests/system/apache/cassandra/example_cassandra_dag.py index 0b25fa2975dbd..5c1fe9b5271c5 100644 --- a/providers/tests/system/apache/cassandra/example_cassandra_dag.py +++ b/providers/tests/system/apache/cassandra/example_cassandra_dag.py @@ -50,7 +50,7 @@ record_sensor = CassandraRecordSensor(task_id="cassandra_record_sensor", keys={"p1": "v1", "p2": "v2"}) # [END howto_operator_cassandra_sensors] -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/apache/drill/example_drill_dag.py b/providers/tests/system/apache/drill/example_drill_dag.py index fdaf376d5a162..7386f3b59cb1a 100644 --- a/providers/tests/system/apache/drill/example_drill_dag.py +++ b/providers/tests/system/apache/drill/example_drill_dag.py @@ -47,7 +47,7 @@ ) # [END howto_operator_drill] -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/apache/druid/example_druid_dag.py b/providers/tests/system/apache/druid/example_druid_dag.py index 5e1bfc446bb09..eac9af6d19c57 100644 --- a/providers/tests/system/apache/druid/example_druid_dag.py +++ b/providers/tests/system/apache/druid/example_druid_dag.py @@ -55,7 +55,7 @@ """ # [END howto_operator_druid_submit] -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/apache/hive/example_twitter_dag.py b/providers/tests/system/apache/hive/example_twitter_dag.py index 129a8ebf9f77f..c247bf9033020 100644 --- a/providers/tests/system/apache/hive/example_twitter_dag.py +++ b/providers/tests/system/apache/hive/example_twitter_dag.py @@ -154,13 +154,13 @@ def transfer_to_db(): analyze >> load_to_hdfs >> load_to_hive >> hive_to_mysql - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/apache/iceberg/example_iceberg.py b/providers/tests/system/apache/iceberg/example_iceberg.py index a4708805b6788..3c9f59f1ea14d 100644 --- a/providers/tests/system/apache/iceberg/example_iceberg.py +++ b/providers/tests/system/apache/iceberg/example_iceberg.py @@ -49,7 +49,7 @@ ) -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/apache/kafka/example_dag_event_listener.py 
b/providers/tests/system/apache/kafka/example_dag_event_listener.py index eb6866735e830..734cb9c49554c 100644 --- a/providers/tests/system/apache/kafka/example_dag_event_listener.py +++ b/providers/tests/system/apache/kafka/example_dag_event_listener.py @@ -120,7 +120,7 @@ def wait_for_event(message, **context): t0 >> t1 -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/apache/kafka/example_dag_hello_kafka.py b/providers/tests/system/apache/kafka/example_dag_hello_kafka.py index 5e70d7324a1a0..88fef458ea261 100644 --- a/providers/tests/system/apache/kafka/example_dag_hello_kafka.py +++ b/providers/tests/system/apache/kafka/example_dag_hello_kafka.py @@ -240,7 +240,7 @@ def hello_kafka(): t0 >> t3 >> [t4, t4b] >> t5 >> t6 -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/apache/kylin/example_kylin_dag.py b/providers/tests/system/apache/kylin/example_kylin_dag.py index 1bfec2cd1a7dd..e4de4105e05b4 100644 --- a/providers/tests/system/apache/kylin/example_kylin_dag.py +++ b/providers/tests/system/apache/kylin/example_kylin_dag.py @@ -109,13 +109,13 @@ def gen_build_time(): # gen_build_time >> refresh_task1 # gen_build_time >> merge_task # gen_build_time >> build_task3 - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/apache/livy/example_livy.py b/providers/tests/system/apache/livy/example_livy.py index 09f8f7e244e02..8f3ea04a00867 100644 --- a/providers/tests/system/apache/livy/example_livy.py +++ b/providers/tests/system/apache/livy/example_livy.py @@ -73,13 +73,13 @@ livy_java_task_deferrable >> livy_python_task_deferrable # [END create_livy_deferrable] - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/apache/pig/example_pig.py b/providers/tests/system/apache/pig/example_pig.py index bb556a85111d1..4d47bf5e6570c 100644 --- a/providers/tests/system/apache/pig/example_pig.py +++ b/providers/tests/system/apache/pig/example_pig.py @@ -44,7 +44,7 @@ # [END create_pig] -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/apache/pinot/example_pinot_dag.py b/providers/tests/system/apache/pinot/example_pinot_dag.py
index d95022f366910..825c5070e2944 100644
--- a/providers/tests/system/apache/pinot/example_pinot_dag.py
+++ b/providers/tests/system/apache/pinot/example_pinot_dag.py
@@ -52,7 +52,7 @@ def pinot_dbi_api():
     pinot_admin()
     pinot_dbi_api()
 
-from dev.tests_common.test_utils.system_tests import get_test_run
+from tests_common.test_utils.system_tests import get_test_run
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/apache/spark/example_pyspark.py b/providers/tests/system/apache/spark/example_pyspark.py
index 0ca14a76f48f9..d2d8ef6983f04 100644
--- a/providers/tests/system/apache/spark/example_pyspark.py
+++ b/providers/tests/system/apache/spark/example_pyspark.py
@@ -70,7 +70,7 @@ def print_df(df: pd.DataFrame):
 dag = example_pyspark()  # type: ignore
 
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/apache/spark/example_spark_dag.py b/providers/tests/system/apache/spark/example_spark_dag.py
index b68fc2cf761a9..6680450ae987a 100644
--- a/providers/tests/system/apache/spark/example_spark_dag.py
+++ b/providers/tests/system/apache/spark/example_spark_dag.py
@@ -75,7 +75,7 @@
     )
     # [END howto_operator_spark_sql]
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/asana/example_asana.py b/providers/tests/system/asana/example_asana.py
index ca68b6cc3dd2e..a7f4943beda41 100644
--- a/providers/tests/system/asana/example_asana.py
+++ b/providers/tests/system/asana/example_asana.py
@@ -100,13 +100,13 @@
     create >> find >> update >> delete
     # [END asana_example_dag]
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/cncf/kubernetes/example_kubernetes.py b/providers/tests/system/cncf/kubernetes/example_kubernetes.py
index bed3b9da746b9..dc74895a3e0fd 100644
--- a/providers/tests/system/cncf/kubernetes/example_kubernetes.py
+++ b/providers/tests/system/cncf/kubernetes/example_kubernetes.py
@@ -167,13 +167,13 @@
     write_xcom >> pod_task_xcom_result
     # [END howto_operator_k8s_write_xcom]
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/cncf/kubernetes/example_kubernetes_async.py b/providers/tests/system/cncf/kubernetes/example_kubernetes_async.py
index 43eb0a1f919cf..7eb08442be3ba 100644
--- a/providers/tests/system/cncf/kubernetes/example_kubernetes_async.py
+++ b/providers/tests/system/cncf/kubernetes/example_kubernetes_async.py
@@ -198,13 +198,13 @@
     write_xcom_async >> pod_task_xcom_result_async
     # [END howto_operator_k8s_write_xcom_async]
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/cncf/kubernetes/example_kubernetes_decorator.py b/providers/tests/system/cncf/kubernetes/example_kubernetes_decorator.py
index 088a3fdae2ede..6784b22f3a141 100644
--- a/providers/tests/system/cncf/kubernetes/example_kubernetes_decorator.py
+++ b/providers/tests/system/cncf/kubernetes/example_kubernetes_decorator.py
@@ -63,7 +63,7 @@ def print_pattern():
 # [END howto_operator_kubernetes]
 
 
-from dev.tests_common.test_utils.system_tests import get_test_run
+from tests_common.test_utils.system_tests import get_test_run
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/cncf/kubernetes/example_kubernetes_job.py b/providers/tests/system/cncf/kubernetes/example_kubernetes_job.py
index 1a2432c135898..c81ff48e792d4 100644
--- a/providers/tests/system/cncf/kubernetes/example_kubernetes_job.py
+++ b/providers/tests/system/cncf/kubernetes/example_kubernetes_job.py
@@ -94,13 +94,13 @@
     k8s_job >> update_job >> delete_job_task
     k8s_job_def >> delete_job_task_def
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/cncf/kubernetes/example_kubernetes_resource.py b/providers/tests/system/cncf/kubernetes/example_kubernetes_resource.py
index 1c24a533a85f0..e7f6a4d42d32e 100644
--- a/providers/tests/system/cncf/kubernetes/example_kubernetes_resource.py
+++ b/providers/tests/system/cncf/kubernetes/example_kubernetes_resource.py
@@ -72,13 +72,13 @@
 
     t1 >> t2
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/cncf/kubernetes/example_spark_kubernetes.py b/providers/tests/system/cncf/kubernetes/example_spark_kubernetes.py
index 072cffeb90f04..b244a9a800bab 100644
--- a/providers/tests/system/cncf/kubernetes/example_spark_kubernetes.py
+++ b/providers/tests/system/cncf/kubernetes/example_spark_kubernetes.py
@@ -76,13 +76,13 @@
     t1 >> t2
     # [END SparkKubernetesOperator_DAG]
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/cohere/example_cohere_embedding_operator.py b/providers/tests/system/cohere/example_cohere_embedding_operator.py
index 9686724f2cea9..d8907260d3fd7 100644
--- a/providers/tests/system/cohere/example_cohere_embedding_operator.py
+++ b/providers/tests/system/cohere/example_cohere_embedding_operator.py
@@ -34,7 +34,7 @@
 # [END howto_operator_cohere_embedding]
 
 
-from dev.tests_common.test_utils.system_tests import get_test_run
+from tests_common.test_utils.system_tests import get_test_run
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/common/io/example_file_transfer_local_to_s3.py b/providers/tests/system/common/io/example_file_transfer_local_to_s3.py
index 0b90a75f2e6e2..f1522d5f2611b 100644
--- a/providers/tests/system/common/io/example_file_transfer_local_to_s3.py
+++ b/providers/tests/system/common/io/example_file_transfer_local_to_s3.py
@@ -76,12 +76,12 @@ def remove_bucket():
 
     temp_file >> transfer >> remove_bucket() >> delete_temp_file(temp_file_path)
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
     list(dag.tasks) >> watcher()
 
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/common/sql/example_sql_column_table_check.py b/providers/tests/system/common/sql/example_sql_column_table_check.py
index c48dc60bfc639..4033c12faf43b 100644
--- a/providers/tests/system/common/sql/example_sql_column_table_check.py
+++ b/providers/tests/system/common/sql/example_sql_column_table_check.py
@@ -79,7 +79,7 @@
     column_check >> row_count_check
 
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/common/sql/example_sql_execute_query.py b/providers/tests/system/common/sql/example_sql_execute_query.py
index 8dced19cef719..535798046305b 100644
--- a/providers/tests/system/common/sql/example_sql_execute_query.py
+++ b/providers/tests/system/common/sql/example_sql_execute_query.py
@@ -56,7 +56,7 @@
 # [END howto_operator_sql_execute_query]
 
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/databricks/example_databricks.py b/providers/tests/system/databricks/example_databricks.py
index 00d9969bd99b3..999cebb674292 100644
--- a/providers/tests/system/databricks/example_databricks.py
+++ b/providers/tests/system/databricks/example_databricks.py
@@ -238,13 +238,13 @@
     )
     # [END howto_operator_databricks_task_sql]
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/databricks/example_databricks_repos.py b/providers/tests/system/databricks/example_databricks_repos.py
index d07226e76ed01..33b0d1266f477 100644
--- a/providers/tests/system/databricks/example_databricks_repos.py
+++ b/providers/tests/system/databricks/example_databricks_repos.py
@@ -78,13 +78,13 @@
 
     (create_repo >> update_repo >> notebook_task >> delete_repo)
 
-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher
 
     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()
 
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402
 
 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/databricks/example_databricks_sensors.py b/providers/tests/system/databricks/example_databricks_sensors.py
index ea233a79395bc..cf676183e6c9b 100644
--- a/providers/tests/system/databricks/example_databricks_sensors.py
+++ b/providers/tests/system/databricks/example_databricks_sensors.py
@@ -88,7 +88,7 @@
     # runs, else all the subsequent DAG tasks and the DAG are marked as failed.
(sql_sensor >> partition_sensor) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This example does not need a watcher in order to properly mark success/failure # since it is a single task, but it is given here as an example for users to @@ -96,7 +96,7 @@ # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/databricks/example_databricks_sql.py b/providers/tests/system/databricks/example_databricks_sql.py index f08ce3cd56b65..79c2bfa2fca9a 100644 --- a/providers/tests/system/databricks/example_databricks_sql.py +++ b/providers/tests/system/databricks/example_databricks_sql.py @@ -113,13 +113,13 @@ (create >> create_file >> import_csv >> select >> select_into_file) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/databricks/example_databricks_workflow.py b/providers/tests/system/databricks/example_databricks_workflow.py index 3a6c829520d89..5921f109d0843 100644 --- a/providers/tests/system/databricks/example_databricks_workflow.py +++ b/providers/tests/system/databricks/example_databricks_workflow.py @@ -143,13 +143,13 @@ notebook_1 >> notebook_2 >> task_operator_nb_1 >> sql_query # [END howto_databricks_workflow_notebook] - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/dbt/cloud/example_dbt_cloud.py b/providers/tests/system/dbt/cloud/example_dbt_cloud.py index 17f8a59efea02..7a9a58a87a67b 100644 --- a/providers/tests/system/dbt/cloud/example_dbt_cloud.py +++ b/providers/tests/system/dbt/cloud/example_dbt_cloud.py @@ -18,6 +18,8 @@ from datetime import datetime +from tests_common.test_utils.system_tests import get_test_env_id + from airflow.models import DAG from airflow.operators.empty import EmptyOperator from airflow.providers.dbt.cloud.operators.dbt import ( @@ -28,8 +30,6 @@ from airflow.providers.dbt.cloud.sensors.dbt import DbtCloudJobRunSensor from airflow.utils.edgemodifier import Label -from dev.tests_common.test_utils.system_tests import get_test_env_id - ENV_ID = get_test_env_id() DAG_ID = "example_dbt_cloud" @@ -92,13 +92,13 @@ # trigger_job_run2 >> job_run_sensor # trigger_job_run2 >> job_run_sensor_deferred - from 
dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/dingding/example_dingding.py b/providers/tests/system/dingding/example_dingding.py index 4ecb41c68cc91..536f28fcc0d41 100644 --- a/providers/tests/system/dingding/example_dingding.py +++ b/providers/tests/system/dingding/example_dingding.py @@ -199,13 +199,13 @@ def failure_callback(context): >> msg_failure_callback ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/docker/example_docker.py b/providers/tests/system/docker/example_docker.py index 108813236ac62..14fa3fa3203c7 100644 --- a/providers/tests/system/docker/example_docker.py +++ b/providers/tests/system/docker/example_docker.py @@ -57,7 +57,7 @@ t1 >> [t2, t3] >> t4 ) -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/docker/example_docker_copy_data.py b/providers/tests/system/docker/example_docker_copy_data.py index c270408390cfa..584b16b8e096b 100644 --- a/providers/tests/system/docker/example_docker_copy_data.py +++ b/providers/tests/system/docker/example_docker_copy_data.py @@ -103,7 +103,7 @@ t_is_data_available >> t_move >> t_print ) -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/docker/example_docker_swarm.py b/providers/tests/system/docker/example_docker_swarm.py index 3a68dcea81673..a45d1bba351ca 100644 --- a/providers/tests/system/docker/example_docker_swarm.py +++ b/providers/tests/system/docker/example_docker_swarm.py @@ -47,7 +47,7 @@ t1 ) -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/docker/example_taskflow_api_docker_virtualenv.py b/providers/tests/system/docker/example_taskflow_api_docker_virtualenv.py index 3c8b528606e02..b026d9e950fc7 100644 --- a/providers/tests/system/docker/example_taskflow_api_docker_virtualenv.py +++ 
b/providers/tests/system/docker/example_taskflow_api_docker_virtualenv.py @@ -116,9 +116,9 @@ def load(total_order_value: float): tutorial_dag = tutorial_taskflow_api_docker_virtualenv() # [END dag_invocation] -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) # [END tutorial] diff --git a/providers/tests/system/elasticsearch/example_elasticsearch_query.py b/providers/tests/system/elasticsearch/example_elasticsearch_query.py index 93471d874c3c9..f4450b8e335cf 100644 --- a/providers/tests/system/elasticsearch/example_elasticsearch_query.py +++ b/providers/tests/system/elasticsearch/example_elasticsearch_query.py @@ -81,7 +81,7 @@ def use_elasticsearch_hook(): task_id="print_data_from_elasticsearch", python_callable=use_elasticsearch_hook ) -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/ftp/example_ftp.py b/providers/tests/system/ftp/example_ftp.py index 8fb2e38806ae4..5b9bfe65db4fa 100644 --- a/providers/tests/system/ftp/example_ftp.py +++ b/providers/tests/system/ftp/example_ftp.py @@ -87,13 +87,13 @@ ftp_put >> ftp_get ftps_put >> ftps_get - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/github/example_github.py b/providers/tests/system/github/example_github.py index 70eb8bf902f04..b076647177cc9 100644 --- a/providers/tests/system/github/example_github.py +++ b/providers/tests/system/github/example_github.py @@ -100,7 +100,7 @@ def tag_checker(repo: Any, tag_name: str) -> bool | None: # [END howto_operator_list_tags_github] -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/ads/example_ads.py b/providers/tests/system/google/ads/example_ads.py index 162fd45334f67..f55c7306f33aa 100644 --- a/providers/tests/system/google/ads/example_ads.py +++ b/providers/tests/system/google/ads/example_ads.py @@ -117,14 +117,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from
dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/automl/example_automl_dataset.py b/providers/tests/system/google/cloud/automl/example_automl_dataset.py index 49de111ffb358..d1305da4fb081 100644 --- a/providers/tests/system/google/cloud/automl/example_automl_dataset.py +++ b/providers/tests/system/google/cloud/automl/example_automl_dataset.py @@ -162,14 +162,14 @@ def upload_updated_csv_file_to_gcs(): >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/automl/example_automl_translation.py b/providers/tests/system/google/cloud/automl/example_automl_translation.py index 60dbf782f281e..e758bdf113e48 100644 --- a/providers/tests/system/google/cloud/automl/example_automl_translation.py +++ b/providers/tests/system/google/cloud/automl/example_automl_translation.py @@ -188,14 +188,14 @@ def upload_csv_file_to_gcs(): >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/automl/example_automl_video_classification.py b/providers/tests/system/google/cloud/automl/example_automl_video_classification.py index 36853c3826037..538831d307ca5 100644 --- a/providers/tests/system/google/cloud/automl/example_automl_video_classification.py +++ b/providers/tests/system/google/cloud/automl/example_automl_video_classification.py @@ -158,13 +158,13 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/automl/example_automl_video_tracking.py b/providers/tests/system/google/cloud/automl/example_automl_video_tracking.py index 65718f10b7697..3876e9b0a39de 100644 --- a/providers/tests/system/google/cloud/automl/example_automl_video_tracking.py +++ 
b/providers/tests/system/google/cloud/automl/example_automl_video_tracking.py @@ -158,13 +158,13 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/automl/example_automl_vision_classification.py b/providers/tests/system/google/cloud/automl/example_automl_vision_classification.py index 40e68d3291ce8..e329febf5d9fc 100644 --- a/providers/tests/system/google/cloud/automl/example_automl_vision_classification.py +++ b/providers/tests/system/google/cloud/automl/example_automl_vision_classification.py @@ -132,13 +132,13 @@ >> delete_image_dataset ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/automl/example_automl_vision_object_detection.py b/providers/tests/system/google/cloud/automl/example_automl_vision_object_detection.py index 334e154cea4d1..b87b42008d943 100644 --- a/providers/tests/system/google/cloud/automl/example_automl_vision_object_detection.py +++ b/providers/tests/system/google/cloud/automl/example_automl_vision_object_detection.py @@ -134,13 +134,13 @@ >> delete_image_dataset ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/azure/example_azure_blob_to_gcs.py b/providers/tests/system/google/cloud/azure/example_azure_blob_to_gcs.py index 837cb99e5ffc1..e915642b70204 100644 --- a/providers/tests/system/google/cloud/azure/example_azure_blob_to_gcs.py +++ b/providers/tests/system/google/cloud/azure/example_azure_blob_to_gcs.py @@ -59,13 +59,13 @@ (wait_for_blob >> transfer_files_to_gcs) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: 
tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/azure/example_azure_fileshare_to_gcs.py b/providers/tests/system/google/cloud/azure/example_azure_fileshare_to_gcs.py index a897b6dc7d3d1..45172fa2070db 100644 --- a/providers/tests/system/google/cloud/azure/example_azure_fileshare_to_gcs.py +++ b/providers/tests/system/google/cloud/azure/example_azure_fileshare_to_gcs.py @@ -81,14 +81,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/bigquery/example_bigquery_dataset.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_dataset.py index 87e13a2c25165..7be37b8631f3b 100644 --- a/providers/tests/system/google/cloud/bigquery/example_bigquery_dataset.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_dataset.py @@ -85,14 +85,14 @@ >> delete_dataset ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/bigquery/example_bigquery_dts.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_dts.py index b387563069a94..37677a1514ac0 100644 --- a/providers/tests/system/google/cloud/bigquery/example_bigquery_dts.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_dts.py @@ -180,14 +180,14 @@ delete_bucket, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/bigquery/example_bigquery_operations.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_operations.py index 5e487f5a93e2a..308c4337684da 100644 --- a/providers/tests/system/google/cloud/bigquery/example_bigquery_operations.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_operations.py @@ -97,14 +97,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG 
list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/bigquery/example_bigquery_operations_location.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_operations_location.py index 2b026070a06b4..d7547e2b810de 100644 --- a/providers/tests/system/google/cloud/bigquery/example_bigquery_operations_location.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_operations_location.py @@ -77,14 +77,14 @@ >> delete_dataset_with_location ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py index cd31ea05bcdcb..0e4d930442924 100644 --- a/providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_queries.py @@ -247,7 +247,7 @@ execute_insert_query >> [check_count, check_value, check_interval] >> delete_dataset execute_insert_query >> [column_check, table_check] >> delete_dataset - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG @@ -257,7 +257,7 @@ globals()[DAG_ID] = dag for dag in DAGS_LIST: - from dev.tests_common.test_utils.system_tests import get_test_run + from tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py index 012eba080d1a4..104f09ea3335b 100644 --- a/providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_queries_async.py @@ -259,14 +259,14 @@ insert_query_job >> execute_long_running_query >> check_value >> check_interval [check_count, check_interval, bigquery_execute_multi_query, get_data_result] >> delete_dataset - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git 
a/providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py index aa8d36f5ed893..760275f904c95 100644 --- a/providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_sensors.py @@ -165,14 +165,14 @@ >> delete_dataset ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py index 9c1432a0a8da5..588e33d368d19 100644 --- a/providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_tables.py @@ -224,14 +224,14 @@ >> delete_dataset ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/bigquery/example_bigquery_to_bigquery.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_bigquery.py index cb1fa63498da9..879d8aa3fb284 100644 --- a/providers/tests/system/google/cloud/bigquery/example_bigquery_to_bigquery.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_bigquery.py @@ -100,14 +100,14 @@ >> delete_dataset ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs.py index 21acb27f161db..76684a1c05c38 100644 --- a/providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs.py @@ -98,14 +98,14 @@ >> [delete_bucket, delete_dataset] ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from 
dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs_async.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs_async.py index fdf084601c770..2bc4846fea343 100644 --- a/providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs_async.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_gcs_async.py @@ -97,14 +97,14 @@ >> [delete_bucket, delete_dataset] ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/bigquery/example_bigquery_to_mssql.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_mssql.py index 51e713560bc23..3fd5e9259548e 100644 --- a/providers/tests/system/google/cloud/bigquery/example_bigquery_to_mssql.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_mssql.py @@ -326,13 +326,13 @@ def delete_connection(connection_id: str) -> None: >> delete_persistent_disk ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/bigquery/example_bigquery_to_mysql.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_mysql.py index a3b726098773d..623924c2c96b2 100644 --- a/providers/tests/system/google/cloud/bigquery/example_bigquery_to_mysql.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_mysql.py @@ -88,13 +88,13 @@ >> delete_dataset ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/bigquery/example_bigquery_to_postgres.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_postgres.py index ca4e3148d0a18..54f5efc13d93e 100644 --- a/providers/tests/system/google/cloud/bigquery/example_bigquery_to_postgres.py +++ 
b/providers/tests/system/google/cloud/bigquery/example_bigquery_to_postgres.py @@ -362,13 +362,13 @@ def delete_connection(connection_id: str) -> None: >> delete_persistent_disk ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/bigquery/example_bigquery_transfer.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_transfer.py index 73d55710c9db9..39f1eea2bd1d5 100644 --- a/providers/tests/system/google/cloud/bigquery/example_bigquery_transfer.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_transfer.py @@ -117,13 +117,13 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/bigquery/example_bigquery_value_check.py b/providers/tests/system/google/cloud/bigquery/example_bigquery_value_check.py index 3872ca092321f..5a51ad310135f 100644 --- a/providers/tests/system/google/cloud/bigquery/example_bigquery_value_check.py +++ b/providers/tests/system/google/cloud/bigquery/example_bigquery_value_check.py @@ -131,8 +131,8 @@ >> delete_dataset ) - from dev.tests_common.test_utils.system_tests import get_test_run - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.system_tests import get_test_run + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG @@ -142,7 +142,7 @@ test_run = get_test_run(dag) -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/bigtable/example_bigtable.py b/providers/tests/system/google/cloud/bigtable/example_bigtable.py index 77abc49fae258..b1f3469f4fcb9 100644 --- a/providers/tests/system/google/cloud/bigtable/example_bigtable.py +++ b/providers/tests/system/google/cloud/bigtable/example_bigtable.py @@ -225,14 +225,14 @@ def update_clusters_and_instance(): >> [delete_instance_task, delete_instance_task2] ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import 
get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py b/providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py index 84dc031a5268f..322c6cd7eceae 100644 --- a/providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py +++ b/providers/tests/system/google/cloud/cloud_batch/example_cloud_batch.py @@ -187,13 +187,13 @@ def _create_job(): ([submit1, submit2] >> list_tasks >> assert_tasks >> list_jobs >> get_name >> [delete_job1, delete_job2]) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/cloud_build/example_cloud_build.py b/providers/tests/system/google/cloud/cloud_build/example_cloud_build.py index bf2391413a1c3..319047fbaa63a 100644 --- a/providers/tests/system/google/cloud/cloud_build/example_cloud_build.py +++ b/providers/tests/system/google/cloud/cloud_build/example_cloud_build.py @@ -267,14 +267,14 @@ def no_wait_cancel_retry_get_deferrable(): ] ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py b/providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py index f6873da9c5680..e799af6650d1e 100644 --- a/providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py +++ b/providers/tests/system/google/cloud/cloud_build/example_cloud_build_trigger.py @@ -182,14 +182,14 @@ def get_project_number(): >> list_build_triggers ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/cloud_functions/example_functions.py b/providers/tests/system/google/cloud/cloud_functions/example_functions.py index fd0b8ea5f5a4c..675224980469f 100644 --- a/providers/tests/system/google/cloud/cloud_functions/example_functions.py +++ 
b/providers/tests/system/google/cloud/cloud_functions/example_functions.py @@ -122,14 +122,14 @@ delete_function, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py b/providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py index 670a850b04ac4..5d425b8b0e3cc 100644 --- a/providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py +++ b/providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_memcached.py @@ -198,14 +198,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py b/providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py index 3d8ebf6f287dc..cb5551f070405 100644 --- a/providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py +++ b/providers/tests/system/google/cloud/cloud_memorystore/example_cloud_memorystore_redis.py @@ -266,14 +266,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/cloud_run/example_cloud_run.py b/providers/tests/system/google/cloud/cloud_run/example_cloud_run.py index 08145e0336ede..368d835ddb6ad 100644 --- a/providers/tests/system/google/cloud/cloud_run/example_cloud_run.py +++ b/providers/tests/system/google/cloud/cloud_run/example_cloud_run.py @@ -367,13 +367,13 @@ def _create_job_instance_with_label(): >> (delete_job1, delete_job2, delete_job3) ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) 
>> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/cloud_run/example_cloud_run_service.py b/providers/tests/system/google/cloud/cloud_run/example_cloud_run_service.py index 43318c4b14306..8d87bcbc10033 100644 --- a/providers/tests/system/google/cloud/cloud_run/example_cloud_run_service.py +++ b/providers/tests/system/google/cloud/cloud_run/example_cloud_run_service.py @@ -84,14 +84,14 @@ def _create_service(): >> delete_cloud_run_service ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py b/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py index 52414b2784b27..57da5f43c1d64 100644 --- a/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py +++ b/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql.py @@ -294,14 +294,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query.py b/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query.py index 6cf7e0c08751c..3d97fad5aacf9 100644 --- a/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query.py +++ b/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query.py @@ -541,13 +541,13 @@ def delete_connection(connection_id: str) -> None: # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py b/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py index db77f18339af0..69ec1784fa47c 
100644 --- a/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py +++ b/providers/tests/system/google/cloud/cloud_sql/example_cloud_sql_query_ssl.py @@ -491,13 +491,13 @@ def delete_secret(ssl_secret_id, db_type: str) -> None: # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/composer/example_cloud_composer.py b/providers/tests/system/google/cloud/composer/example_cloud_composer.py index 266a7e4a444ad..f04a2c8596a1e 100644 --- a/providers/tests/system/google/cloud/composer/example_cloud_composer.py +++ b/providers/tests/system/google/cloud/composer/example_cloud_composer.py @@ -214,14 +214,14 @@ [delete_env, defer_delete_env], ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/compute/example_compute.py b/providers/tests/system/google/cloud/compute/example_compute.py index 8343920890708..82f2b008c14dc 100644 --- a/providers/tests/system/google/cloud/compute/example_compute.py +++ b/providers/tests/system/google/cloud/compute/example_compute.py @@ -267,14 +267,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/compute/example_compute_igm.py b/providers/tests/system/google/cloud/compute/example_compute_igm.py index 11357452dfc69..5f292958c2308 100644 --- a/providers/tests/system/google/cloud/compute/example_compute_igm.py +++ b/providers/tests/system/google/cloud/compute/example_compute_igm.py @@ -236,14 +236,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> 
watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/compute/example_compute_ssh.py b/providers/tests/system/google/cloud/compute/example_compute_ssh.py index 5b7d1523017ff..ffba97e91db15 100644 --- a/providers/tests/system/google/cloud/compute/example_compute_ssh.py +++ b/providers/tests/system/google/cloud/compute/example_compute_ssh.py @@ -138,14 +138,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/compute/example_compute_ssh_os_login.py b/providers/tests/system/google/cloud/compute/example_compute_ssh_os_login.py index 63ed278a08c5a..89a9223e7069c 100644 --- a/providers/tests/system/google/cloud/compute/example_compute_ssh_os_login.py +++ b/providers/tests/system/google/cloud/compute/example_compute_ssh_os_login.py @@ -146,14 +146,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/compute/example_compute_ssh_parallel.py b/providers/tests/system/google/cloud/compute/example_compute_ssh_parallel.py index 3ab61afc0c259..e8b8767fffb79 100644 --- a/providers/tests/system/google/cloud/compute/example_compute_ssh_parallel.py +++ b/providers/tests/system/google/cloud/compute/example_compute_ssh_parallel.py @@ -139,14 +139,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py 
b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py index 75b1ad93dd479..78549f3735073 100644 --- a/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py +++ b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_deidentify_content.py @@ -157,14 +157,14 @@ >> delete_template ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_info_types.py b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_info_types.py index fb2198b16793f..72e35d3234491 100644 --- a/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_info_types.py +++ b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_info_types.py @@ -154,14 +154,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_inspect_template.py b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_inspect_template.py index 22b259582f4ac..0d48b2fc19545 100644 --- a/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_inspect_template.py +++ b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_inspect_template.py @@ -115,14 +115,14 @@ >> delete_template ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job.py b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job.py index b7dc2ac1a4053..27db1730ad0bc 100644 --- a/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job.py +++ b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job.py @@ -90,14 +90,14 @@ (create_job >> list_jobs >> get_job >> cancel_job >> delete_job) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # 
when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job_trigger.py b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job_trigger.py index 902875f7b5921..11fd48e1e4e05 100644 --- a/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job_trigger.py +++ b/providers/tests/system/google/cloud/data_loss_prevention/example_dlp_job_trigger.py @@ -95,14 +95,14 @@ (create_trigger >> list_triggers >> get_trigger >> update_trigger >> delete_trigger) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataflow/example_dataflow_go.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_go.py index 57e5941e3ed6b..92e779f69d66d 100644 --- a/providers/tests/system/google/cloud/dataflow/example_dataflow_go.py +++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_go.py @@ -149,7 +149,7 @@ def check_autoscaling_event(autoscaling_events: list[dict]) -> bool: ) -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataflow/example_dataflow_native_java.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_native_java.py index 3629194e6ee9b..66d0ce0995c73 100644 --- a/providers/tests/system/google/cloud/dataflow/example_dataflow_native_java.py +++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_native_java.py @@ -148,14 +148,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataflow/example_dataflow_native_python.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_native_python.py index 229373ed1bf3f..70cd5db9e9dd4 100644 --- a/providers/tests/system/google/cloud/dataflow/example_dataflow_native_python.py +++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_native_python.py @@ -110,14 +110,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher 
import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py index 31f1cd026f19a..9bca9b02871b9 100644 --- a/providers/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py +++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_native_python_async.py @@ -176,14 +176,14 @@ def check_autoscaling_event(autoscaling_events: list[dict]) -> bool: >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataflow/example_dataflow_pipeline.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_pipeline.py index cfd5e06b029b6..800540eb4d70a 100644 --- a/providers/tests/system/google/cloud/dataflow/example_dataflow_pipeline.py +++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_pipeline.py @@ -137,14 +137,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py index 1a6d0aec3923b..c6303d74231be 100644 --- a/providers/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py +++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_sensors_deferrable.py @@ -177,14 +177,14 @@ def check_autoscaling_event(autoscaling_events: list[dict]) -> bool: >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = 
get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataflow/example_dataflow_sql.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_sql.py index e3b0ee711a921..2ba0bf0534c59 100644 --- a/providers/tests/system/google/cloud/dataflow/example_dataflow_sql.py +++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_sql.py @@ -137,13 +137,13 @@ >> delete_bq_dataset ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataflow/example_dataflow_streaming_python.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_streaming_python.py index a15dffb3a3282..4e7a7ccebdd1b 100644 --- a/providers/tests/system/google/cloud/dataflow/example_dataflow_streaming_python.py +++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_streaming_python.py @@ -114,14 +114,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataflow/example_dataflow_template.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_template.py index 86545514607aa..4b5bccacb0fa1 100644 --- a/providers/tests/system/google/cloud/dataflow/example_dataflow_template.py +++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_template.py @@ -158,14 +158,14 @@ delete_bucket, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataflow/example_dataflow_yaml.py b/providers/tests/system/google/cloud/dataflow/example_dataflow_yaml.py index 2243ad695bdd4..14db8b8936223 100644 --- a/providers/tests/system/google/cloud/dataflow/example_dataflow_yaml.py +++ b/providers/tests/system/google/cloud/dataflow/example_dataflow_yaml.py @@ -162,13 +162,13 @@ >> delete_bq_dataset ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from 
dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataform/example_dataform.py b/providers/tests/system/google/cloud/dataform/example_dataform.py index b61247a877008..e88a37caef2e6 100644 --- a/providers/tests/system/google/cloud/dataform/example_dataform.py +++ b/providers/tests/system/google/cloud/dataform/example_dataform.py @@ -328,13 +328,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/datafusion/example_datafusion.py b/providers/tests/system/google/cloud/datafusion/example_datafusion.py index a61fbbd01da91..2181d2b2badab 100644 --- a/providers/tests/system/google/cloud/datafusion/example_datafusion.py +++ b/providers/tests/system/google/cloud/datafusion/example_datafusion.py @@ -340,13 +340,13 @@ def get_artifacts_versions(ti=None): >> [delete_bucket1, delete_bucket2] ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/datapipelines/example_datapipeline.py b/providers/tests/system/google/cloud/datapipelines/example_datapipeline.py index 91fb7eed26d2a..0c0c430eae150 100644 --- a/providers/tests/system/google/cloud/datapipelines/example_datapipeline.py +++ b/providers/tests/system/google/cloud/datapipelines/example_datapipeline.py @@ -138,14 +138,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataplex/example_dataplex.py b/providers/tests/system/google/cloud/dataplex/example_dataplex.py index 34e9a0fd05c19..f5ce1bbd41780 100644 --- a/providers/tests/system/google/cloud/dataplex/example_dataplex.py +++ b/providers/tests/system/google/cloud/dataplex/example_dataplex.py @@ -206,14 +206,14 @@ 
delete_bucket, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py b/providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py index 44f4af2435925..da447a8ced5e4 100644 --- a/providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py +++ b/providers/tests/system/google/cloud/dataplex/example_dataplex_dp.py @@ -329,14 +329,14 @@ [delete_lake, delete_dataset], ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py b/providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py index d31d574755c90..1a1910757fca3 100644 --- a/providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py +++ b/providers/tests/system/google/cloud/dataplex/example_dataplex_dq.py @@ -363,14 +363,14 @@ [delete_lake, delete_dataset], ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataprep/example_dataprep.py b/providers/tests/system/google/cloud/dataprep/example_dataprep.py index cdc736a41c66b..edffb69e580b4 100644 --- a/providers/tests/system/google/cloud/dataprep/example_dataprep.py +++ b/providers/tests/system/google/cloud/dataprep/example_dataprep.py @@ -307,14 +307,14 @@ def delete_connection(connection_id: str) -> None: [delete_bucket_task, delete_connection_task], ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_batch.py 
b/providers/tests/system/google/cloud/dataproc/example_dataproc_batch.py index 54481f43ca762..c88a1c3c6803c 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_batch.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_batch.py @@ -176,13 +176,13 @@ >> delete_batch_4 ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_batch_deferrable.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_batch_deferrable.py index 79d3766995e3d..17e5f99b52585 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_batch_deferrable.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_batch_deferrable.py @@ -91,14 +91,14 @@ >> delete_batch ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py index a8ed6b13de552..5bf500aeef3c0 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_batch_persistent.py @@ -141,14 +141,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py index 0eb27762c2f98..e64a0914941c9 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py @@ -121,13 +121,13 @@ >> delete_cluster ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with 
trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py index 8ae262dbf624c..2f7153d44059e 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_deferrable.py @@ -137,14 +137,14 @@ >> delete_cluster ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_diagnose.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_diagnose.py index 3eacc905bc703..9e6c228a9d90d 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_diagnose.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_diagnose.py @@ -115,14 +115,14 @@ >> delete_cluster ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_generator.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_generator.py index f990363c48004..0562be439b1c6 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_generator.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_generator.py @@ -135,14 +135,14 @@ >> [delete_cluster, delete_bucket] ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_start_stop.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_start_stop.py index 2e4b698573bf2..c68bdd013bb2d 100644 --- 
a/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_start_stop.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_start_stop.py @@ -111,13 +111,13 @@ >> delete_cluster ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_update.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_update.py index 7ed0061d9947c..620019cd4856b 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_update.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_cluster_update.py @@ -120,14 +120,14 @@ >> delete_cluster ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_flink.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_flink.py index ce1a6fc4451e9..34bcdb17e5479 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_flink.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_flink.py @@ -124,14 +124,14 @@ >> [delete_cluster, delete_bucket] ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_gke.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_gke.py index bb1044da62414..8048a23283e4c 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_gke.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_gke.py @@ -144,13 +144,13 @@ >> delete_gke_cluster ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest 
(see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_hadoop.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_hadoop.py index c1f5423041fa4..e4370b2f7955c 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_hadoop.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_hadoop.py @@ -123,14 +123,14 @@ >> [delete_cluster, delete_bucket] ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_hive.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_hive.py index 6aadceb552f61..bcddd461560c5 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_hive.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_hive.py @@ -124,14 +124,14 @@ >> delete_cluster ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_pig.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_pig.py index 762ca2bc73fd2..848138ae54732 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_pig.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_pig.py @@ -108,14 +108,14 @@ >> delete_cluster ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_presto.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_presto.py index f5bdf25732a5e..ea8ccb231df9d 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_presto.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_presto.py @@ -115,14 +115,14 @@ >> delete_cluster ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> 
watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_pyspark.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_pyspark.py index 71e7245b7d728..fdc22d8cae6d9 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_pyspark.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_pyspark.py @@ -140,14 +140,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_spark.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark.py index e43a56d230bf4..5ff5ce8ed4ed2 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_spark.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark.py @@ -111,14 +111,14 @@ >> delete_cluster ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_async.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_async.py index 475e9912e8562..df3d969079255 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_async.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_async.py @@ -121,14 +121,14 @@ >> delete_cluster ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_deferrable.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_deferrable.py index 5e6d0b773af6f..20b68b0574b89 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_deferrable.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_deferrable.py @@ -112,14 +112,14 @@ >> 
delete_cluster ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_sql.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_sql.py index aef860a0e545a..3b34b8dbb298d 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_sql.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_spark_sql.py @@ -108,14 +108,14 @@ >> delete_cluster ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_sparkr.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_sparkr.py index 7b416ade38648..39890a501b0bc 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_sparkr.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_sparkr.py @@ -136,14 +136,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_trino.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_trino.py index d3f7f2a1a3a4c..3f366f6088d95 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_trino.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_trino.py @@ -117,14 +117,14 @@ >> delete_cluster ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_workflow.py 
b/providers/tests/system/google/cloud/dataproc/example_dataproc_workflow.py index ab465a124d5fd..eba4ca34a7f33 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_workflow.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_workflow.py @@ -104,14 +104,14 @@ >> instantiate_inline_workflow_template ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc/example_dataproc_workflow_deferrable.py b/providers/tests/system/google/cloud/dataproc/example_dataproc_workflow_deferrable.py index e2319a35a124f..26dd27efcb836 100644 --- a/providers/tests/system/google/cloud/dataproc/example_dataproc_workflow_deferrable.py +++ b/providers/tests/system/google/cloud/dataproc/example_dataproc_workflow_deferrable.py @@ -108,14 +108,14 @@ >> instantiate_inline_workflow_template_async ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py b/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py index ee3b5b70ad519..cbd612ba80162 100644 --- a/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py +++ b/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore.py @@ -192,14 +192,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py b/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py index 12af45d210653..dddb69bfd0827 100644 --- a/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py +++ b/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py @@ -126,14 +126,14 @@ ) (create_service >> backup_service >> list_backups >> restore_service >> delete_backup >> delete_service) - from dev.tests_common.test_utils.watcher import watcher + from 
tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py b/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py index 6b06b868c73e2..f3317b9ce7f19 100644 --- a/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py +++ b/providers/tests/system/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py @@ -232,14 +232,14 @@ def get_hive_warehouse_bucket(**kwargs): >> [delete_dataproc_cluster, delete_metastore_service, delete_warehouse_bucket] ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/datastore/example_datastore_commit.py b/providers/tests/system/google/cloud/datastore/example_datastore_commit.py index 430d176ed733c..60ec53cf1f8a4 100644 --- a/providers/tests/system/google/cloud/datastore/example_datastore_commit.py +++ b/providers/tests/system/google/cloud/datastore/example_datastore_commit.py @@ -160,14 +160,14 @@ [delete_bucket, delete_export_operation, delete_import_operation], ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/datastore/example_datastore_query.py b/providers/tests/system/google/cloud/datastore/example_datastore_query.py index 0b282a1feb34e..f6908823a8b28 100644 --- a/providers/tests/system/google/cloud/datastore/example_datastore_query.py +++ b/providers/tests/system/google/cloud/datastore/example_datastore_query.py @@ -80,14 +80,14 @@ allocate_ids >> begin_transaction_query >> run_query - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG 
with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/datastore/example_datastore_rollback.py b/providers/tests/system/google/cloud/datastore/example_datastore_rollback.py index 44d0dc1709418..0634eabeb553d 100644 --- a/providers/tests/system/google/cloud/datastore/example_datastore_rollback.py +++ b/providers/tests/system/google/cloud/datastore/example_datastore_rollback.py @@ -63,14 +63,14 @@ begin_transaction_to_rollback >> rollback_transaction - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_calendar_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_calendar_to_gcs.py index f44e7315e2d18..3b3cd028897df 100644 --- a/providers/tests/system/google/cloud/gcs/example_calendar_to_gcs.py +++ b/providers/tests/system/google/cloud/gcs/example_calendar_to_gcs.py @@ -115,13 +115,13 @@ def delete_connection(connection_id: str) -> None: >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_firestore.py b/providers/tests/system/google/cloud/gcs/example_firestore.py index 89e0e3fe1c6d9..1860d30c0d5aa 100644 --- a/providers/tests/system/google/cloud/gcs/example_firestore.py +++ b/providers/tests/system/google/cloud/gcs/example_firestore.py @@ -170,14 +170,14 @@ >> [delete_dataset, delete_bucket] ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_gcs_acl.py b/providers/tests/system/google/cloud/gcs/example_gcs_acl.py index 7843c0877c1d9..7000ac4d45551 100644 --- a/providers/tests/system/google/cloud/gcs/example_gcs_acl.py +++ b/providers/tests/system/google/cloud/gcs/example_gcs_acl.py @@ -108,14 +108,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG 
list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_gcs_copy_delete.py b/providers/tests/system/google/cloud/gcs/example_gcs_copy_delete.py index 6bbec540df9f2..62c31d80b286b 100644 --- a/providers/tests/system/google/cloud/gcs/example_gcs_copy_delete.py +++ b/providers/tests/system/google/cloud/gcs/example_gcs_copy_delete.py @@ -122,14 +122,14 @@ [delete_bucket_src, delete_bucket_dst], ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_gcs_sensor.py b/providers/tests/system/google/cloud/gcs/example_gcs_sensor.py index 2d4da2887d4e5..a07aff849e465 100644 --- a/providers/tests/system/google/cloud/gcs/example_gcs_sensor.py +++ b/providers/tests/system/google/cloud/gcs/example_gcs_sensor.py @@ -201,14 +201,14 @@ def mode_setter(self, value): delete_bucket, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery.py b/providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery.py index 5d7e393bff29e..efea506c909d1 100644 --- a/providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery.py +++ b/providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery.py @@ -82,13 +82,13 @@ >> delete_test_dataset ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery_async.py b/providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery_async.py index 1ca531b818008..7572f2bc559d3 100644 --- a/providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery_async.py +++ b/providers/tests/system/google/cloud/gcs/example_gcs_to_bigquery_async.py @@ -174,13 +174,13 @@ >> delete_test_dataset_delimiter ) - from 
dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py index ac1d70307a535..c4c812868efa3 100644 --- a/providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py +++ b/providers/tests/system/google/cloud/gcs/example_gcs_to_gcs.py @@ -275,14 +275,14 @@ def delete_work_dir(create_workdir_result: str) -> None: [delete_bucket_src, delete_bucket_dst, delete_work_dir(create_workdir_task)], ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py b/providers/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py index bfb200a15a63d..c15e24ff3fbac 100644 --- a/providers/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py +++ b/providers/tests/system/google/cloud/gcs/example_gcs_to_gdrive.py @@ -204,13 +204,13 @@ def delete_connection(connection_id: str) -> None: >> [delete_bucket, delete_connection_task] ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_gcs_to_sheets.py b/providers/tests/system/google/cloud/gcs/example_gcs_to_sheets.py index 69947698cd7bb..991f8347f5da2 100644 --- a/providers/tests/system/google/cloud/gcs/example_gcs_to_sheets.py +++ b/providers/tests/system/google/cloud/gcs/example_gcs_to_sheets.py @@ -131,13 +131,13 @@ def delete_connection(connection_id: str) -> None: >> [delete_bucket, delete_connection_task] ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git 
a/providers/tests/system/google/cloud/gcs/example_gcs_transform.py b/providers/tests/system/google/cloud/gcs/example_gcs_transform.py index 0b59119c6bc1e..421272f8b5775 100644 --- a/providers/tests/system/google/cloud/gcs/example_gcs_transform.py +++ b/providers/tests/system/google/cloud/gcs/example_gcs_transform.py @@ -95,14 +95,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_gcs_transform_timespan.py b/providers/tests/system/google/cloud/gcs/example_gcs_transform_timespan.py index 3a5b03695ee40..1c7e68b9d1252 100644 --- a/providers/tests/system/google/cloud/gcs/example_gcs_transform_timespan.py +++ b/providers/tests/system/google/cloud/gcs/example_gcs_transform_timespan.py @@ -114,14 +114,14 @@ [delete_bucket_src, delete_bucket_dst], ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_gcs_upload_download.py b/providers/tests/system/google/cloud/gcs/example_gcs_upload_download.py index 6907a29fb5389..abde62d35bfaa 100644 --- a/providers/tests/system/google/cloud/gcs/example_gcs_upload_download.py +++ b/providers/tests/system/google/cloud/gcs/example_gcs_upload_download.py @@ -92,14 +92,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_gdrive_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_gdrive_to_gcs.py index 6ee188bd957ec..39633a8fc2bd4 100644 --- a/providers/tests/system/google/cloud/gcs/example_gdrive_to_gcs.py +++ b/providers/tests/system/google/cloud/gcs/example_gdrive_to_gcs.py @@ -165,13 +165,13 @@ def delete_connection(connection_id: str) -> None: >> [delete_bucket, delete_connection_task] ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from 
dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_mssql_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_mssql_to_gcs.py index bc6ac79f1e0b1..7aece3764c7f1 100644 --- a/providers/tests/system/google/cloud/gcs/example_mssql_to_gcs.py +++ b/providers/tests/system/google/cloud/gcs/example_mssql_to_gcs.py @@ -78,14 +78,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_mysql_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_mysql_to_gcs.py index cc53337e56934..ae3fea2c236e5 100644 --- a/providers/tests/system/google/cloud/gcs/example_mysql_to_gcs.py +++ b/providers/tests/system/google/cloud/gcs/example_mysql_to_gcs.py @@ -295,13 +295,13 @@ def delete_connection(connection_id: str) -> None: mysql_to_gcs >> [delete_gcs_bucket, delete_firewall_rule, delete_gce_instance, delete_connection_task] delete_gce_instance >> delete_persistent_disk - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_oracle_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_oracle_to_gcs.py index c727eef1f66fb..ba4d7025acfab 100644 --- a/providers/tests/system/google/cloud/gcs/example_oracle_to_gcs.py +++ b/providers/tests/system/google/cloud/gcs/example_oracle_to_gcs.py @@ -64,13 +64,13 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_s3_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_s3_to_gcs.py index 487dbbce552e2..59f88f78b276a 100644 --- a/providers/tests/system/google/cloud/gcs/example_s3_to_gcs.py +++ b/providers/tests/system/google/cloud/gcs/example_s3_to_gcs.py @@ -110,14 +110,14 @@ def upload_file(): >> delete_gcs_bucket ) - 
from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_sftp_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_sftp_to_gcs.py index 994e7e8242e1e..b2c250426528a 100644 --- a/providers/tests/system/google/cloud/gcs/example_sftp_to_gcs.py +++ b/providers/tests/system/google/cloud/gcs/example_sftp_to_gcs.py @@ -119,14 +119,14 @@ delete_bucket, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_sheets.py b/providers/tests/system/google/cloud/gcs/example_sheets.py index 7d17379819168..001ebb69d6820 100644 --- a/providers/tests/system/google/cloud/gcs/example_sheets.py +++ b/providers/tests/system/google/cloud/gcs/example_sheets.py @@ -145,13 +145,13 @@ def delete_connection(connection_id: str) -> None: >> [delete_bucket, delete_connection_task] ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_sheets_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_sheets_to_gcs.py index 08688c332563a..7c546060333c1 100644 --- a/providers/tests/system/google/cloud/gcs/example_sheets_to_gcs.py +++ b/providers/tests/system/google/cloud/gcs/example_sheets_to_gcs.py @@ -120,13 +120,13 @@ def delete_connection(connection_id: str) -> None: >> [delete_bucket, delete_connection_task] ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/gcs/example_trino_to_gcs.py b/providers/tests/system/google/cloud/gcs/example_trino_to_gcs.py index 
af92e550481f7..e76a66ea22be3 100644 --- a/providers/tests/system/google/cloud/gcs/example_trino_to_gcs.py +++ b/providers/tests/system/google/cloud/gcs/example_trino_to_gcs.py @@ -221,13 +221,13 @@ def safe_name(s: str) -> str: >> [delete_dataset, delete_bucket] ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py index e9fe3f6836b94..36d21ff701eb3 100644 --- a/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py +++ b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine.py @@ -113,14 +113,14 @@ create_cluster >> [pod_task, pod_task_xcom] >> delete_cluster pod_task_xcom >> pod_task_xcom_result - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py index f5cb8f570754f..9283f77dbd2a9 100644 --- a/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py +++ b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_async.py @@ -116,14 +116,14 @@ create_cluster >> [pod_task, pod_task_xcom_async] >> delete_cluster pod_task_xcom_async >> pod_task_xcom_result - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py index a4c56c17e5baf..9ff3ef32a7c09 100644 --- a/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py +++ b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_job.py @@ -179,14 +179,14 @@ delete_cluster, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This 
test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py index 06c23432a923a..44dbaae5537b2 100644 --- a/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py +++ b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_kueue.py @@ -179,14 +179,14 @@ >> delete_cluster ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py index 784ba994862b7..28a6ac985b6ee 100644 --- a/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py +++ b/providers/tests/system/google/cloud/kubernetes_engine/example_kubernetes_engine_resource.py @@ -101,14 +101,14 @@ create_cluster >> create_resource_task >> delete_resource_task >> delete_cluster - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/life_sciences/example_life_sciences.py b/providers/tests/system/google/cloud/life_sciences/example_life_sciences.py index 170eeb39ad1aa..e5024562b70a8 100644 --- a/providers/tests/system/google/cloud/life_sciences/example_life_sciences.py +++ b/providers/tests/system/google/cloud/life_sciences/example_life_sciences.py @@ -131,14 +131,14 @@ delete_bucket, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/ml_engine/example_mlengine.py 
b/providers/tests/system/google/cloud/ml_engine/example_mlengine.py index f0eaf7cb30171..a735d3015ab55 100644 --- a/providers/tests/system/google/cloud/ml_engine/example_mlengine.py +++ b/providers/tests/system/google/cloud/ml_engine/example_mlengine.py @@ -280,13 +280,13 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/natural_language/example_natural_language.py b/providers/tests/system/google/cloud/natural_language/example_natural_language.py index cc1aba0f165c7..4eb542d9aecce 100644 --- a/providers/tests/system/google/cloud/natural_language/example_natural_language.py +++ b/providers/tests/system/google/cloud/natural_language/example_natural_language.py @@ -119,13 +119,13 @@ analyze_sentiment >> analyze_sentiment_result analyze_classify_text >> analyze_classify_text_result - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/pubsub/example_pubsub.py b/providers/tests/system/google/cloud/pubsub/example_pubsub.py index 93ab1cde6e0a8..9ff909a3b401f 100644 --- a/providers/tests/system/google/cloud/pubsub/example_pubsub.py +++ b/providers/tests/system/google/cloud/pubsub/example_pubsub.py @@ -146,14 +146,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/pubsub/example_pubsub_deferrable.py b/providers/tests/system/google/cloud/pubsub/example_pubsub_deferrable.py index a902ea5617f6d..f889ff16a286e 100644 --- a/providers/tests/system/google/cloud/pubsub/example_pubsub_deferrable.py +++ b/providers/tests/system/google/cloud/pubsub/example_pubsub_deferrable.py @@ -101,14 +101,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" 
task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/spanner/example_spanner.py b/providers/tests/system/google/cloud/spanner/example_spanner.py index b11a5cd61f92c..ba9f727e1f6dc 100644 --- a/providers/tests/system/google/cloud/spanner/example_spanner.py +++ b/providers/tests/system/google/cloud/spanner/example_spanner.py @@ -161,14 +161,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/speech_to_text/example_speech_to_text.py b/providers/tests/system/google/cloud/speech_to_text/example_speech_to_text.py index f2382a6053aa4..16603bf65a86d 100644 --- a/providers/tests/system/google/cloud/speech_to_text/example_speech_to_text.py +++ b/providers/tests/system/google/cloud/speech_to_text/example_speech_to_text.py @@ -89,14 +89,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/sql_to_sheets/example_sql_to_sheets.py b/providers/tests/system/google/cloud/sql_to_sheets/example_sql_to_sheets.py index d34531ba52a0a..65c88b38ccf30 100644 --- a/providers/tests/system/google/cloud/sql_to_sheets/example_sql_to_sheets.py +++ b/providers/tests/system/google/cloud/sql_to_sheets/example_sql_to_sheets.py @@ -315,13 +315,13 @@ def delete_connection(connection_id: str) -> None: ] delete_gce_instance >> delete_persistent_disk - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/stackdriver/example_stackdriver.py b/providers/tests/system/google/cloud/stackdriver/example_stackdriver.py index 202c9bcfbdccf..e720ac1f16594 100644 --- 
a/providers/tests/system/google/cloud/stackdriver/example_stackdriver.py +++ b/providers/tests/system/google/cloud/stackdriver/example_stackdriver.py @@ -228,13 +228,13 @@ delete_alert_policy_2, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py b/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py index 4d6a010954401..cf76f6eddd58b 100644 --- a/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py +++ b/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_aws.py @@ -254,14 +254,14 @@ ] ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py b/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py index b2cef2f831b5f..960a9c0679a9b 100644 --- a/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py +++ b/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcp.py @@ -195,13 +195,13 @@ >> [delete_transfer, delete_bucket_src, delete_bucket_dst] ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py b/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py index e670eba755428..46289ea66ca23 100644 --- a/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py +++ b/providers/tests/system/google/cloud/storage_transfer/example_cloud_storage_transfer_service_gcs_to_gcs.py @@ -106,13 +106,13 @@ >> [delete_bucket_src, delete_bucket_dst] ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs 
watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/tasks/example_queue.py b/providers/tests/system/google/cloud/tasks/example_queue.py index 9797a29d04e22..614bdfd352073 100644 --- a/providers/tests/system/google/cloud/tasks/example_queue.py +++ b/providers/tests/system/google/cloud/tasks/example_queue.py @@ -161,14 +161,14 @@ def generate_random_string(): delete_queue, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/tasks/example_tasks.py b/providers/tests/system/google/cloud/tasks/example_tasks.py index 0eae95fd1075d..f4ab2f53b0bb4 100644 --- a/providers/tests/system/google/cloud/tasks/example_tasks.py +++ b/providers/tests/system/google/cloud/tasks/example_tasks.py @@ -152,14 +152,14 @@ def generate_random_string(): random_string, create_queue, create_task, tasks_get, list_tasks, run_task, delete_task, delete_queue ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/text_to_speech/example_text_to_speech.py b/providers/tests/system/google/cloud/text_to_speech/example_text_to_speech.py index 0227049508a74..437db735c2d57 100644 --- a/providers/tests/system/google/cloud/text_to_speech/example_text_to_speech.py +++ b/providers/tests/system/google/cloud/text_to_speech/example_text_to_speech.py @@ -78,14 +78,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/transfers/example_gcs_to_sftp.py b/providers/tests/system/google/cloud/transfers/example_gcs_to_sftp.py index 8d3f19416313a..dd1a5945f2cc7 100644 --- 
a/providers/tests/system/google/cloud/transfers/example_gcs_to_sftp.py +++ b/providers/tests/system/google/cloud/transfers/example_gcs_to_sftp.py @@ -171,14 +171,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/transfers/example_gdrive_to_local.py b/providers/tests/system/google/cloud/transfers/example_gdrive_to_local.py index abfcbabce0852..2e5c6f8e8f7eb 100644 --- a/providers/tests/system/google/cloud/transfers/example_gdrive_to_local.py +++ b/providers/tests/system/google/cloud/transfers/example_gdrive_to_local.py @@ -167,13 +167,13 @@ def delete_connection(connection_id: str) -> None: >> [delete_bucket, delete_connection_task] ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/transfers/example_postgres_to_gcs.py b/providers/tests/system/google/cloud/transfers/example_postgres_to_gcs.py index 91bde027982ec..0f9d5be7eaeec 100644 --- a/providers/tests/system/google/cloud/transfers/example_postgres_to_gcs.py +++ b/providers/tests/system/google/cloud/transfers/example_postgres_to_gcs.py @@ -290,13 +290,13 @@ def delete_connection(connection_id: str) -> None: postgres_to_gcs >> [delete_gcs_bucket, delete_firewall_rule, delete_gce_instance, delete_connection_task] delete_gce_instance >> delete_persistent_disk - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/translate/example_translate.py b/providers/tests/system/google/cloud/translate/example_translate.py index 13a3ee061e308..44af8bf5d1d12 100644 --- a/providers/tests/system/google/cloud/translate/example_translate.py +++ b/providers/tests/system/google/cloud/translate/example_translate.py @@ -56,14 +56,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger 
rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/translate_speech/example_translate_speech.py b/providers/tests/system/google/cloud/translate_speech/example_translate_speech.py index 4fe34d0b86f57..2e023d68ae537 100644 --- a/providers/tests/system/google/cloud/translate_speech/example_translate_speech.py +++ b/providers/tests/system/google/cloud/translate_speech/example_translate_speech.py @@ -110,14 +110,14 @@ >> delete_bucket ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py index 8dd0ecc9c9550..e22622dd58605 100644 --- a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py @@ -150,13 +150,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py index bf96da992aca1..77629b0bc78c1 100644 --- a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py @@ -135,13 +135,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git 
a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py index ca6c65e1a6ddd..cd75035793af0 100644 --- a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py @@ -52,13 +52,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py index fb32c7475c7f1..106e622abad5e 100644 --- a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py @@ -140,13 +140,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py index 6c3db89382f24..ea68e55fa2605 100644 --- a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py @@ -145,13 +145,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py index 78bb9ffa6bad4..06a56f379654f 100644 --- 
a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py @@ -231,13 +231,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_container.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_container.py index 1295d20983eeb..316e0d3a7cbbe 100644 --- a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_container.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_container.py @@ -208,13 +208,13 @@ def TABULAR_DATASET(bucket_name): # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py index 3fa6169cb2759..eb2a080a92a1f 100644 --- a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job.py @@ -250,13 +250,13 @@ def TABULAR_DATASET(bucket_name): # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py index fbc40b888216b..f1b471d446262 100644 --- a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py @@ -211,13 +211,13 @@ def TABULAR_DATASET(bucket_name): # ### Everything below 
this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py index 3e1b98a2232d1..af4fbb88c2b90 100644 --- a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_dataset.py @@ -269,13 +269,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py index 12b1181a79280..13e24b869c79e 100644 --- a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_endpoint.py @@ -206,13 +206,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model.py index 7e696184eef41..a1140afc1983f 100644 --- a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model.py @@ -155,14 +155,14 @@ ) # [END how_to_cloud_vertex_ai_run_evaluation_operator] - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from 
tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py index affba0b6aede8..9ce1c77350e67 100644 --- a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_generative_model_tuning.py @@ -56,13 +56,13 @@ ) # [END how_to_cloud_vertex_ai_supervised_fine_tuning_train_operator] - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py index 55254741d9cc2..4fabdbc6ac67b 100644 --- a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py @@ -181,13 +181,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py index 75de719b6aa9e..230aeca14a67f 100644 --- a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py @@ -51,13 +51,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py 
b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py index 3ad5537a10c29..272069b2f87f0 100644 --- a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_model_service.py @@ -345,13 +345,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py index 5ab29ee999f16..bc97ff6a58be3 100644 --- a/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py +++ b/providers/tests/system/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py @@ -177,13 +177,13 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py b/providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py index e630fd1471a8a..21ee27cd9be76 100644 --- a/providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py +++ b/providers/tests/system/google/cloud/video_intelligence/example_video_intelligence.py @@ -154,14 +154,14 @@ delete_bucket, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/vision/example_vision_annotate_image.py b/providers/tests/system/google/cloud/vision/example_vision_annotate_image.py index 1e09fb7fe4f9d..178920e83a433 100644 --- a/providers/tests/system/google/cloud/vision/example_vision_annotate_image.py +++ b/providers/tests/system/google/cloud/vision/example_vision_annotate_image.py @@ -191,14 +191,14 @@ delete_bucket, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs 
watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/vision/example_vision_autogenerated.py b/providers/tests/system/google/cloud/vision/example_vision_autogenerated.py index 11fd3cdd54029..e84af0fdbac34 100644 --- a/providers/tests/system/google/cloud/vision/example_vision_autogenerated.py +++ b/providers/tests/system/google/cloud/vision/example_vision_autogenerated.py @@ -268,14 +268,14 @@ delete_bucket, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/vision/example_vision_explicit.py b/providers/tests/system/google/cloud/vision/example_vision_explicit.py index 0c71be95bda7a..2e480fec4f75e 100644 --- a/providers/tests/system/google/cloud/vision/example_vision_explicit.py +++ b/providers/tests/system/google/cloud/vision/example_vision_explicit.py @@ -279,14 +279,14 @@ delete_bucket, ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/cloud/workflows/example_workflows.py b/providers/tests/system/google/cloud/workflows/example_workflows.py index 6d13484acbb42..b933d75a6f2b6 100644 --- a/providers/tests/system/google/cloud/workflows/example_workflows.py +++ b/providers/tests/system/google/cloud/workflows/example_workflows.py @@ -227,14 +227,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/datacatalog/example_datacatalog_entries.py b/providers/tests/system/google/datacatalog/example_datacatalog_entries.py index 0f84361d4fcc8..644adadeef31c 100644 --- 
a/providers/tests/system/google/datacatalog/example_datacatalog_entries.py +++ b/providers/tests/system/google/datacatalog/example_datacatalog_entries.py @@ -200,14 +200,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/datacatalog/example_datacatalog_search_catalog.py b/providers/tests/system/google/datacatalog/example_datacatalog_search_catalog.py index a77a023f32338..0ff6cf0716a28 100644 --- a/providers/tests/system/google/datacatalog/example_datacatalog_search_catalog.py +++ b/providers/tests/system/google/datacatalog/example_datacatalog_search_catalog.py @@ -223,14 +223,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py b/providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py index af49d3de08fa7..19bf62c88ccd5 100644 --- a/providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py +++ b/providers/tests/system/google/datacatalog/example_datacatalog_tag_templates.py @@ -183,14 +183,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/datacatalog/example_datacatalog_tags.py b/providers/tests/system/google/datacatalog/example_datacatalog_tags.py index fdbdaa451056a..0f74b49e102ea 100644 --- a/providers/tests/system/google/datacatalog/example_datacatalog_tags.py +++ b/providers/tests/system/google/datacatalog/example_datacatalog_tags.py @@ -233,14 +233,14 @@ # ### Everything below this line is not part of example ### # ### Just for system tests purpose ### - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly 
mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/leveldb/example_leveldb.py b/providers/tests/system/google/leveldb/example_leveldb.py index 8b11d9e87bebc..52a543f048e49 100644 --- a/providers/tests/system/google/leveldb/example_leveldb.py +++ b/providers/tests/system/google/leveldb/example_leveldb.py @@ -60,14 +60,14 @@ # [END howto_operator_leveldb_put_key] get_key_leveldb_task >> put_key_leveldb_task - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/marketing_platform/example_analytics_admin.py b/providers/tests/system/google/marketing_platform/example_analytics_admin.py index 16ce6f8190bdc..3536e7b945da2 100644 --- a/providers/tests/system/google/marketing_platform/example_analytics_admin.py +++ b/providers/tests/system/google/marketing_platform/example_analytics_admin.py @@ -219,13 +219,13 @@ def delete_connection(connection_id: str) -> None: # TEST TEARDOWN >> delete_connection_task ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/marketing_platform/example_campaign_manager.py b/providers/tests/system/google/marketing_platform/example_campaign_manager.py index 0c3b26e5e98b5..70e8c208d50eb 100644 --- a/providers/tests/system/google/marketing_platform/example_campaign_manager.py +++ b/providers/tests/system/google/marketing_platform/example_campaign_manager.py @@ -322,13 +322,13 @@ def delete_connection(connection_id: str) -> None: >> delete_connection(connection_id=CONNECTION_ID) ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/marketing_platform/example_search_ads.py 
b/providers/tests/system/google/marketing_platform/example_search_ads.py index 5d86a3ad1684d..cb733f5bcf4f3 100644 --- a/providers/tests/system/google/marketing_platform/example_search_ads.py +++ b/providers/tests/system/google/marketing_platform/example_search_ads.py @@ -102,7 +102,7 @@ (query_report >> get_field >> search_fields >> get_custom_column >> list_custom_columns) -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/google/suite/example_local_to_drive.py b/providers/tests/system/google/suite/example_local_to_drive.py index ac8ebfc5aabbc..397e65fa2bb5a 100644 --- a/providers/tests/system/google/suite/example_local_to_drive.py +++ b/providers/tests/system/google/suite/example_local_to_drive.py @@ -141,13 +141,13 @@ def delete_connection(connection_id: str) -> None: >> delete_connection_task ) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/http/example_http.py b/providers/tests/system/http/example_http.py index 98423943607cd..a2900ad582461 100644 --- a/providers/tests/system/http/example_http.py +++ b/providers/tests/system/http/example_http.py @@ -157,7 +157,7 @@ def get_next_page_cursor(response) -> dict | None: task_get_op_response_filter >> task_put_op >> task_del_op >> task_post_op_formenc task_post_op_formenc >> task_get_paginated -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/influxdb/example_influxdb.py b/providers/tests/system/influxdb/example_influxdb.py index 8e4d486742d5f..9b67b05d0a6fd 100644 --- a/providers/tests/system/influxdb/example_influxdb.py +++ b/providers/tests/system/influxdb/example_influxdb.py @@ -61,13 +61,13 @@ def test_influxdb_hook(): ) as dag: test_influxdb_hook() - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/influxdb/example_influxdb_query.py b/providers/tests/system/influxdb/example_influxdb_query.py index 95940e8c8f366..8c4c548b84f4c 100644 --- a/providers/tests/system/influxdb/example_influxdb_query.py +++ b/providers/tests/system/influxdb/example_influxdb_query.py @@ -43,7 +43,7 @@ # [END 
 # [END howto_operator_influxdb]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/jdbc/example_jdbc_queries.py b/providers/tests/system/jdbc/example_jdbc_queries.py
index ce9234a6fb0b7..5e73c22b3cf15 100644
--- a/providers/tests/system/jdbc/example_jdbc_queries.py
+++ b/providers/tests/system/jdbc/example_jdbc_queries.py
@@ -59,13 +59,13 @@
     delete_data >> insert_data >> run_this_last

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/jenkins/example_jenkins_job_trigger.py b/providers/tests/system/jenkins/example_jenkins_job_trigger.py
index 6b7fab62a0d36..a24b48dda6c37 100644
--- a/providers/tests/system/jenkins/example_jenkins_job_trigger.py
+++ b/providers/tests/system/jenkins/example_jenkins_job_trigger.py
@@ -72,7 +72,7 @@ def grab_artifact_from_jenkins(url):
 # job_trigger >> grab_artifact_from_jenkins()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/azure/example_adf_run_pipeline.py b/providers/tests/system/microsoft/azure/example_adf_run_pipeline.py
index a6eae4d71486d..e65524b3c746b 100644
--- a/providers/tests/system/microsoft/azure/example_adf_run_pipeline.py
+++ b/providers/tests/system/microsoft/azure/example_adf_run_pipeline.py
@@ -108,13 +108,13 @@
     # Task dependency created via `XComArgs`:
     # run_pipeline2 >> pipeline_run_sensor

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/azure/example_adls_create.py b/providers/tests/system/microsoft/azure/example_adls_create.py
index 3f76525b6b87e..2c5828da44603 100644
--- a/providers/tests/system/microsoft/azure/example_adls_create.py
+++ b/providers/tests/system/microsoft/azure/example_adls_create.py
@@ -46,13 +46,13 @@
     upload_data >> delete_file

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/azure/example_adls_delete.py b/providers/tests/system/microsoft/azure/example_adls_delete.py
index 34abc6a9b2dae..fcfe1793f27eb 100644
--- a/providers/tests/system/microsoft/azure/example_adls_delete.py
+++ b/providers/tests/system/microsoft/azure/example_adls_delete.py
@@ -46,13 +46,13 @@
     upload_file >> remove_file

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/azure/example_adls_list.py b/providers/tests/system/microsoft/azure/example_adls_list.py
index 594b161aaa9d6..26e65e0cf8d9c 100644
--- a/providers/tests/system/microsoft/azure/example_adls_list.py
+++ b/providers/tests/system/microsoft/azure/example_adls_list.py
@@ -42,13 +42,13 @@
     )
     # [END howto_operator_adls_list]

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/azure/example_azure_batch_operator.py b/providers/tests/system/microsoft/azure/example_azure_batch_operator.py
index 944c737f96a0d..33d4603c16d43 100644
--- a/providers/tests/system/microsoft/azure/example_azure_batch_operator.py
+++ b/providers/tests/system/microsoft/azure/example_azure_batch_operator.py
@@ -57,7 +57,7 @@
 )
 # [END howto_azure_batch_operator]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/azure/example_azure_container_instances.py b/providers/tests/system/microsoft/azure/example_azure_container_instances.py
index ee9b025193e36..fe7ccfd48ecc4 100644
--- a/providers/tests/system/microsoft/azure/example_azure_container_instances.py
+++ b/providers/tests/system/microsoft/azure/example_azure_container_instances.py
@@ -90,7 +90,7 @@
     cpu=1.0,
     task_id="start_container_with_azure_container_volume",
 )
-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
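Why each of these files wires list(dag.tasks) >> watcher(): a "tearDown" task with trigger_rule="all_done" runs and can succeed even when upstream tasks failed, so without a watcher the run could end on a green leaf and be marked successful. A rough sketch of the watcher pattern, assuming the helper re-pathed here behaves like this (the real implementation in tests_common.test_utils.watcher may differ in detail):

from airflow.decorators import task
from airflow.exceptions import AirflowException
from airflow.utils.trigger_rule import TriggerRule


@task(trigger_rule=TriggerRule.ONE_FAILED, retries=0)
def watcher():
    # Runs only if at least one upstream task failed, then fails itself,
    # so the DAG run (and the pytest test wrapping it) is marked failed too.
    raise AirflowException("Failing task because one or more upstream tasks failed.")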
diff --git a/providers/tests/system/microsoft/azure/example_azure_cosmosdb.py b/providers/tests/system/microsoft/azure/example_azure_cosmosdb.py
index 1d4bb60b05163..7dc9524ef46e1 100644
--- a/providers/tests/system/microsoft/azure/example_azure_cosmosdb.py
+++ b/providers/tests/system/microsoft/azure/example_azure_cosmosdb.py
@@ -64,13 +64,13 @@
     t1 >> t2

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/azure/example_azure_service_bus.py b/providers/tests/system/microsoft/azure/example_azure_service_bus.py
index bfa77db40dba0..091fdff3c2cba 100644
--- a/providers/tests/system/microsoft/azure/example_azure_service_bus.py
+++ b/providers/tests/system/microsoft/azure/example_azure_service_bus.py
@@ -172,13 +172,13 @@
         delete_service_bus_queue,
     )

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/azure/example_azure_synapse.py b/providers/tests/system/microsoft/azure/example_azure_synapse.py
index c7f7800e57834..571cd5b46ff88 100644
--- a/providers/tests/system/microsoft/azure/example_azure_synapse.py
+++ b/providers/tests/system/microsoft/azure/example_azure_synapse.py
@@ -69,7 +69,7 @@
 )
 # [END howto_operator_azure_synapse]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/azure/example_fileshare.py b/providers/tests/system/microsoft/azure/example_fileshare.py
index bfa9819dbef68..49909ae38fe73 100644
--- a/providers/tests/system/microsoft/azure/example_fileshare.py
+++ b/providers/tests/system/microsoft/azure/example_fileshare.py
@@ -55,13 +55,13 @@ def delete_fileshare():
 ) as dag:
     create_fileshare() >> delete_fileshare()

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/azure/example_local_to_adls.py b/providers/tests/system/microsoft/azure/example_local_to_adls.py
index d540aaf34d993..f8eace7577528 100644
--- a/providers/tests/system/microsoft/azure/example_local_to_adls.py
+++ b/providers/tests/system/microsoft/azure/example_local_to_adls.py
@@ -47,13 +47,13 @@
     upload_file >> delete_file

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/azure/example_local_to_wasb.py b/providers/tests/system/microsoft/azure/example_local_to_wasb.py
index 7e368ca839928..859c33975842f 100644
--- a/providers/tests/system/microsoft/azure/example_local_to_wasb.py
+++ b/providers/tests/system/microsoft/azure/example_local_to_wasb.py
@@ -49,13 +49,13 @@
     upload >> delete

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/azure/example_msfabric.py b/providers/tests/system/microsoft/azure/example_msfabric.py
index b1113025286d4..0f65df2f72f9e 100644
--- a/providers/tests/system/microsoft/azure/example_msfabric.py
+++ b/providers/tests/system/microsoft/azure/example_msfabric.py
@@ -51,13 +51,13 @@
     )
     # [END howto_operator_ms_fabric_create_item_schedule]

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/azure/example_msgraph.py b/providers/tests/system/microsoft/azure/example_msgraph.py
index 33ee00468523b..239ebb76ab09b 100644
--- a/providers/tests/system/microsoft/azure/example_msgraph.py
+++ b/providers/tests/system/microsoft/azure/example_msgraph.py
@@ -49,13 +49,13 @@
     site_task >> site_pages_task

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/azure/example_powerbi.py b/providers/tests/system/microsoft/azure/example_powerbi.py
index b5a982a1159a5..a2d9d675476bb 100644
--- a/providers/tests/system/microsoft/azure/example_powerbi.py
+++ b/providers/tests/system/microsoft/azure/example_powerbi.py
@@ -97,13 +97,13 @@
     workspaces_task >> workspaces_info_task >> check_workspace_status_task
     refresh_dataset_task >> refresh_dataset_history_task

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/azure/example_powerbi_dataset_refresh.py b/providers/tests/system/microsoft/azure/example_powerbi_dataset_refresh.py
index c02cec3e57f5e..5453caff6463b 100644
--- a/providers/tests/system/microsoft/azure/example_powerbi_dataset_refresh.py
+++ b/providers/tests/system/microsoft/azure/example_powerbi_dataset_refresh.py
@@ -76,13 +76,13 @@ def create_connection(conn_id_name: str):
         refresh_powerbi_dataset,
     )

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/azure/example_s3_to_wasb.py b/providers/tests/system/microsoft/azure/example_s3_to_wasb.py
index 88ae64324d17e..699da911d0846 100644
--- a/providers/tests/system/microsoft/azure/example_s3_to_wasb.py
+++ b/providers/tests/system/microsoft/azure/example_s3_to_wasb.py
@@ -103,13 +103,13 @@
         remove_s3_bucket,
     )

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure when "tearDown" task with trigger
     # rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/azure/example_sftp_to_wasb.py b/providers/tests/system/microsoft/azure/example_sftp_to_wasb.py
index 42e5968304f08..0ed953a5853d8 100644
--- a/providers/tests/system/microsoft/azure/example_sftp_to_wasb.py
+++ b/providers/tests/system/microsoft/azure/example_sftp_to_wasb.py
@@ -75,13 +75,13 @@ def delete_sftp_file():
     transfer_files_to_sftp_step >> transfer_files_to_azure >> delete_blob_file_step >> delete_sftp_file()

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/azure/example_synapse_run_pipeline.py b/providers/tests/system/microsoft/azure/example_synapse_run_pipeline.py
index de87d6b4c2483..589f840e46ab9 100644
--- a/providers/tests/system/microsoft/azure/example_synapse_run_pipeline.py
+++ b/providers/tests/system/microsoft/azure/example_synapse_run_pipeline.py
@@ -43,13 +43,13 @@
     # [END howto_operator_azure_synapse_run_pipeline]
     begin >> run_pipeline1

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/azure/example_wasb_sensors.py b/providers/tests/system/microsoft/azure/example_wasb_sensors.py
index 56c1dce34d688..c4dd7c8fa0b2e 100644
--- a/providers/tests/system/microsoft/azure/example_wasb_sensors.py
+++ b/providers/tests/system/microsoft/azure/example_wasb_sensors.py
@@ -62,7 +62,7 @@
 # [END wasb_prefix_sensor]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/mssql/example_mssql.py b/providers/tests/system/microsoft/mssql/example_mssql.py
index 12e2815c59d01..8e29d805ab261 100644
--- a/providers/tests/system/microsoft/mssql/example_mssql.py
+++ b/providers/tests/system/microsoft/mssql/example_mssql.py
@@ -146,12 +146,12 @@ def insert_mssql_hook():
     )
     # [END mssql_operator_howto_guide]

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/microsoft/winrm/example_winrm.py b/providers/tests/system/microsoft/winrm/example_winrm.py
index 1de1cb8c0fd68..1c7e31ed6752e 100644
--- a/providers/tests/system/microsoft/winrm/example_winrm.py
+++ b/providers/tests/system/microsoft/winrm/example_winrm.py
@@ -64,13 +64,13 @@
     [t1, t2, t3] >> run_this_last

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/mysql/example_mysql.py b/providers/tests/system/mysql/example_mysql.py
index 6f0f884197f3a..2f1f37f573a22 100644
--- a/providers/tests/system/mysql/example_mysql.py
+++ b/providers/tests/system/mysql/example_mysql.py
@@ -59,7 +59,7 @@
 drop_table_mysql_task >> mysql_task

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/neo4j/example_neo4j.py b/providers/tests/system/neo4j/example_neo4j.py
index 80db3fbb2ab1d..54de74d5f9fbd 100644
--- a/providers/tests/system/neo4j/example_neo4j.py
+++ b/providers/tests/system/neo4j/example_neo4j.py
@@ -48,7 +48,7 @@
 # [END run_query_neo4j_operator]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/openai/example_openai.py b/providers/tests/system/openai/example_openai.py
index d342207b5d09b..3475597abd209 100644
--- a/providers/tests/system/openai/example_openai.py
+++ b/providers/tests/system/openai/example_openai.py
@@ -104,7 +104,7 @@ def task_to_store_input_text_in_xcom():
 example_openai_dag()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/openai/example_trigger_batch_operator.py b/providers/tests/system/openai/example_trigger_batch_operator.py
index 3dae1df5d9d0b..4fd3531fab379 100644
--- a/providers/tests/system/openai/example_trigger_batch_operator.py
+++ b/providers/tests/system/openai/example_trigger_batch_operator.py
@@ -111,7 +111,7 @@ def cleanup_batch_output_file(batch_id, **context):
 openai_batch_chat_completions()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/opensearch/example_opensearch.py b/providers/tests/system/opensearch/example_opensearch.py
index e339fbe788481..904f17a47be7a 100644
--- a/providers/tests/system/opensearch/example_opensearch.py
+++ b/providers/tests/system/opensearch/example_opensearch.py
@@ -118,14 +118,14 @@ def load_connections():
     chain(create_index, add_document_by_class, add_document_by_args, search_high_level, search_low_level)

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/opsgenie/example_opsgenie_alert.py b/providers/tests/system/opsgenie/example_opsgenie_alert.py
index cf11f824eec8c..be4056b407e6d 100644
--- a/providers/tests/system/opsgenie/example_opsgenie_alert.py
+++ b/providers/tests/system/opsgenie/example_opsgenie_alert.py
@@ -51,7 +51,7 @@
     )
 # [END howto_opsgenie_delete_alert_operator]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/opsgenie/example_opsgenie_notifier.py b/providers/tests/system/opsgenie/example_opsgenie_notifier.py
index 97f5945aa9772..79123cf83c537 100644
--- a/providers/tests/system/opsgenie/example_opsgenie_notifier.py
+++ b/providers/tests/system/opsgenie/example_opsgenie_notifier.py
@@ -37,7 +37,7 @@
 )
 # [END howto_notifier_opsgenie]

-from dev.tests_common.test_utils.system_tests import get_test_run
+from tests_common.test_utils.system_tests import get_test_run

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/papermill/example_papermill.py b/providers/tests/system/papermill/example_papermill.py
index 529647fe4e719..33b427785cf25 100644
--- a/providers/tests/system/papermill/example_papermill.py
+++ b/providers/tests/system/papermill/example_papermill.py
@@ -52,7 +52,7 @@
 )
 # [END howto_operator_papermill]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/papermill/example_papermill_remote_verify.py b/providers/tests/system/papermill/example_papermill_remote_verify.py
index f325928b8cd01..89d482858cb06 100644
--- a/providers/tests/system/papermill/example_papermill_remote_verify.py
+++ b/providers/tests/system/papermill/example_papermill_remote_verify.py
@@ -75,7 +75,7 @@ def check_notebook(output_notebook, execution_date):
 )
 # [END howto_verify_operator_papermill_remote_kernel]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/papermill/example_papermill_verify.py b/providers/tests/system/papermill/example_papermill_verify.py
index cfae2cc6ed3e0..632b209f21ef6 100644
--- a/providers/tests/system/papermill/example_papermill_verify.py
+++ b/providers/tests/system/papermill/example_papermill_verify.py
@@ -73,7 +73,7 @@ def check_notebook(inlets, execution_date):
 run_this >> check_notebook(inlets=AUTO, execution_date="{{ execution_date }}")
 # [END howto_verify_operator_papermill]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
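The other helper touched by every hunk, get_test_run, is what makes these example DAGs collectable by pytest: the module-level test_run = get_test_run(dag) exposes a test callable. A hypothetical sketch of such a helper, assuming dag.test() semantics (Airflow 2.5+); the actual tests_common.test_utils.system_tests implementation may differ:

from airflow.models.dag import DAG


def get_test_run(dag: DAG):
    # Hypothetical stand-in for tests_common.test_utils.system_tests.get_test_run.
    def test_run():
        # dag.test() executes the whole DAG in-process and returns the DagRun.
        dag_run = dag.test()
        assert dag_run.state == "success", f"DAG run ended in state {dag_run.state}"

    return test_run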
diff --git a/providers/tests/system/pgvector/example_pgvector.py b/providers/tests/system/pgvector/example_pgvector.py
index 5c16b9da307cb..27402ae38f11b 100644
--- a/providers/tests/system/pgvector/example_pgvector.py
+++ b/providers/tests/system/pgvector/example_pgvector.py
@@ -76,7 +76,7 @@ def cleanup_postgres_objects():
 example_pgvector_dag()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/pgvector/example_pgvector_openai.py b/providers/tests/system/pgvector/example_pgvector_openai.py
index 9c8f841e8f933..9d0a3165c22dd 100644
--- a/providers/tests/system/pgvector/example_pgvector_openai.py
+++ b/providers/tests/system/pgvector/example_pgvector_openai.py
@@ -90,7 +90,7 @@ def cleanup_postgres_objects():
 example_pgvector_dag()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/pinecone/example_create_pod_index.py b/providers/tests/system/pinecone/example_create_pod_index.py
index e0342e2d62008..9a93b1241f884 100644
--- a/providers/tests/system/pinecone/example_create_pod_index.py
+++ b/providers/tests/system/pinecone/example_create_pod_index.py
@@ -56,7 +56,7 @@ def delete_index():
 create_index >> delete_index()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/pinecone/example_create_serverless_index.py b/providers/tests/system/pinecone/example_create_serverless_index.py
index 28078b1b608cd..e276c345c4a03 100644
--- a/providers/tests/system/pinecone/example_create_serverless_index.py
+++ b/providers/tests/system/pinecone/example_create_serverless_index.py
@@ -55,7 +55,7 @@ def delete_index():
 create_index >> delete_index()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/pinecone/example_dag_pinecone.py b/providers/tests/system/pinecone/example_dag_pinecone.py
index 744f6518dceae..fd23f79bf7291 100644
--- a/providers/tests/system/pinecone/example_dag_pinecone.py
+++ b/providers/tests/system/pinecone/example_dag_pinecone.py
@@ -46,7 +46,7 @@
 # [END howto_operator_pinecone_ingest]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/pinecone/example_pinecone_cohere.py b/providers/tests/system/pinecone/example_pinecone_cohere.py
index 80e9b1efad8eb..8813479d61a70 100644
--- a/providers/tests/system/pinecone/example_pinecone_cohere.py
+++ b/providers/tests/system/pinecone/example_pinecone_cohere.py
@@ -76,7 +76,7 @@ def delete_index():
 create_index() >> embed_task >> transformed_output >> perform_ingestion >> delete_index()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/pinecone/example_pinecone_openai.py b/providers/tests/system/pinecone/example_pinecone_openai.py
index dad83f48085a5..b75a155192578 100644
--- a/providers/tests/system/pinecone/example_pinecone_openai.py
+++ b/providers/tests/system/pinecone/example_pinecone_openai.py
@@ -107,7 +107,7 @@ def delete_index():
 create_index >> embed_task >> perform_ingestion >> delete_index()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/postgres/example_postgres.py b/providers/tests/system/postgres/example_postgres.py
index 5ffaf6ea0098a..70ad784ecdf19 100644
--- a/providers/tests/system/postgres/example_postgres.py
+++ b/providers/tests/system/postgres/example_postgres.py
@@ -80,13 +80,13 @@
     create_pet_table >> populate_pet_table >> get_all_pets >> get_birth_date
     # [END postgres_sql_execute_query_operator_howto_guide]

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/presto/example_gcs_to_presto.py b/providers/tests/system/presto/example_gcs_to_presto.py
index ceccf098471dc..6c8baaf691346 100644
--- a/providers/tests/system/presto/example_gcs_to_presto.py
+++ b/providers/tests/system/presto/example_gcs_to_presto.py
@@ -50,7 +50,7 @@
 # [END gcs_csv_to_presto_table]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/qdrant/example_dag_qdrant.py b/providers/tests/system/qdrant/example_dag_qdrant.py
index 85c92d9c99312..a6dc94d443586 100644
--- a/providers/tests/system/qdrant/example_dag_qdrant.py
+++ b/providers/tests/system/qdrant/example_dag_qdrant.py
@@ -43,7 +43,7 @@
 # [END howto_operator_qdrant_ingest]

-from dev.tests_common.test_utils.system_tests import get_test_run
+from tests_common.test_utils.system_tests import get_test_run

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/redis/example_redis_publish.py b/providers/tests/system/redis/example_redis_publish.py
index 335256f68e57d..671041e7d9808 100644
--- a/providers/tests/system/redis/example_redis_publish.py
+++ b/providers/tests/system/redis/example_redis_publish.py
@@ -73,13 +73,13 @@
     publish_task >> key_sensor_task

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/salesforce/example_bulk.py b/providers/tests/system/salesforce/example_bulk.py
index 7d43de648805d..71887e57f2156 100644
--- a/providers/tests/system/salesforce/example_bulk.py
+++ b/providers/tests/system/salesforce/example_bulk.py
@@ -90,7 +90,7 @@
 # [END howto_salesforce_bulk_delete_operation]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/salesforce/example_salesforce_apex_rest.py b/providers/tests/system/salesforce/example_salesforce_apex_rest.py
index 2961ffa4e4784..3d724289b338e 100644
--- a/providers/tests/system/salesforce/example_salesforce_apex_rest.py
+++ b/providers/tests/system/salesforce/example_salesforce_apex_rest.py
@@ -41,7 +41,7 @@
 # [END howto_salesforce_apex_rest_operator]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/samba/example_gcs_to_samba.py b/providers/tests/system/samba/example_gcs_to_samba.py
index d7717fcbfc727..895ddcf624e7e 100644
--- a/providers/tests/system/samba/example_gcs_to_samba.py
+++ b/providers/tests/system/samba/example_gcs_to_samba.py
@@ -138,14 +138,14 @@
         >> delete_bucket
     )

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/sftp/example_sftp_sensor.py b/providers/tests/system/sftp/example_sftp_sensor.py
index a3bde8a03af0a..cd23e18d7d56a 100644
--- a/providers/tests/system/sftp/example_sftp_sensor.py
+++ b/providers/tests/system/sftp/example_sftp_sensor.py
@@ -89,14 +89,14 @@ def sftp_sensor_decorator():
         >> remove_file_task_end
     )

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/singularity/example_singularity.py b/providers/tests/system/singularity/example_singularity.py
index e80a0f40eb955..7f311f39c5faa 100644
--- a/providers/tests/system/singularity/example_singularity.py
+++ b/providers/tests/system/singularity/example_singularity.py
@@ -50,7 +50,7 @@
 t3 >> t4

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/slack/example_slack.py b/providers/tests/system/slack/example_slack.py
index f122dffca2782..1b0be71efdc9f 100644
--- a/providers/tests/system/slack/example_slack.py
+++ b/providers/tests/system/slack/example_slack.py
@@ -94,7 +94,7 @@
 )

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/slack/example_slack_webhook.py b/providers/tests/system/slack/example_slack_webhook.py
index 7e45dc92d921f..6a59f6e4de70d 100644
--- a/providers/tests/system/slack/example_slack_webhook.py
+++ b/providers/tests/system/slack/example_slack_webhook.py
@@ -69,7 +69,7 @@
 slack_webhook_operator_text >> slack_webhook_operator_blocks

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/slack/example_sql_to_slack.py b/providers/tests/system/slack/example_sql_to_slack.py
index 457bcecaea671..bd42e3b47d4c5 100644
--- a/providers/tests/system/slack/example_sql_to_slack.py
+++ b/providers/tests/system/slack/example_sql_to_slack.py
@@ -52,7 +52,7 @@
 # [END howto_operator_sql_to_slack_api_file]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/slack/example_sql_to_slack_webhook.py b/providers/tests/system/slack/example_sql_to_slack_webhook.py
index 44fad156657be..75da7353daa2d 100644
--- a/providers/tests/system/slack/example_sql_to_slack_webhook.py
+++ b/providers/tests/system/slack/example_sql_to_slack_webhook.py
@@ -51,7 +51,7 @@
 # [END howto_operator_sql_to_slack_webhook]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/snowflake/example_copy_into_snowflake.py b/providers/tests/system/snowflake/example_copy_into_snowflake.py
index 32eb9699d74b6..a2f9b9277e308 100644
--- a/providers/tests/system/snowflake/example_copy_into_snowflake.py
+++ b/providers/tests/system/snowflake/example_copy_into_snowflake.py
@@ -60,7 +60,7 @@
 # [END howto_operator_s3_copy_into_snowflake]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/snowflake/example_snowflake.py b/providers/tests/system/snowflake/example_snowflake.py
index e851b13c08200..f2bf8d00679de 100644
--- a/providers/tests/system/snowflake/example_snowflake.py
+++ b/providers/tests/system/snowflake/example_snowflake.py
@@ -96,7 +96,7 @@
 )

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/snowflake/example_snowpark_decorator.py b/providers/tests/system/snowflake/example_snowpark_decorator.py
index 4c2319a91b21e..59cf1f1fedbb6 100644
--- a/providers/tests/system/snowflake/example_snowpark_decorator.py
+++ b/providers/tests/system/snowflake/example_snowpark_decorator.py
@@ -80,7 +80,7 @@ def check_num_rows(table_name: str):
 # [END howto_decorator_snowpark]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/snowflake/example_snowpark_operator.py b/providers/tests/system/snowflake/example_snowpark_operator.py
index 1d7cce34f7aa1..c9ede310074c5 100644
--- a/providers/tests/system/snowflake/example_snowpark_operator.py
+++ b/providers/tests/system/snowflake/example_snowpark_operator.py
@@ -89,7 +89,7 @@ def check_num_rows(table_name: str):
 # [END howto_operator_snowpark]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/sqlite/example_sqlite.py b/providers/tests/system/sqlite/example_sqlite.py
index 1ea0270a0df27..e3eec31c1bfd6 100644
--- a/providers/tests/system/sqlite/example_sqlite.py
+++ b/providers/tests/system/sqlite/example_sqlite.py
@@ -91,13 +91,13 @@ def replace_sqlite_hook():
         >> replace_sqlite_hook()
     )

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/tableau/example_tableau.py b/providers/tests/system/tableau/example_tableau.py
index 1472511a66c95..41b99b0ece89e 100644
--- a/providers/tests/system/tableau/example_tableau.py
+++ b/providers/tests/system/tableau/example_tableau.py
@@ -71,7 +71,7 @@
 # task_refresh_workbook_non_blocking >> task_check_job_status

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/telegram/example_telegram.py b/providers/tests/system/telegram/example_telegram.py
index a7e9018cf85f1..23341f991447d 100644
--- a/providers/tests/system/telegram/example_telegram.py
+++ b/providers/tests/system/telegram/example_telegram.py
@@ -45,7 +45,7 @@
 # [END howto_operator_telegram]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py b/providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py
index 26fd27554d5d1..dec92a90366bb 100644
--- a/providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py
+++ b/providers/tests/system/teradata/example_azure_blob_to_teradata_transfer.py
@@ -205,13 +205,13 @@
     )
     # [END azure_blob_to_teradata_transfer_operator_howto_guide]

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/teradata/example_s3_to_teradata_transfer.py b/providers/tests/system/teradata/example_s3_to_teradata_transfer.py
index ab361990b21e4..d7ba2a3b415f8 100644
--- a/providers/tests/system/teradata/example_s3_to_teradata_transfer.py
+++ b/providers/tests/system/teradata/example_s3_to_teradata_transfer.py
@@ -206,13 +206,13 @@
     )
     # [END s3_to_teradata_transfer_operator_howto_guide]

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/teradata/example_ssl_teradata.py b/providers/tests/system/teradata/example_ssl_teradata.py
index 3836715a8e634..8ec5898f7437c 100644
--- a/providers/tests/system/teradata/example_ssl_teradata.py
+++ b/providers/tests/system/teradata/example_ssl_teradata.py
@@ -121,13 +121,13 @@
     # [END teradata_operator_howto_guide]

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/teradata/example_teradata.py b/providers/tests/system/teradata/example_teradata.py
index ae41effce7fec..a12fb3abb7e6c 100644
--- a/providers/tests/system/teradata/example_teradata.py
+++ b/providers/tests/system/teradata/example_teradata.py
@@ -164,13 +164,13 @@
     # [END teradata_operator_howto_guide]

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/teradata/example_teradata_call_sp.py b/providers/tests/system/teradata/example_teradata_call_sp.py
index f48583d83ec55..d41121324bcf3 100644
--- a/providers/tests/system/teradata/example_teradata_call_sp.py
+++ b/providers/tests/system/teradata/example_teradata_call_sp.py
@@ -162,13 +162,13 @@
     # [END howto_teradata_operator_for_sp]

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/teradata/example_teradata_compute_cluster.py b/providers/tests/system/teradata/example_teradata_compute_cluster.py
index 210cde7816bc2..6ffb4fd99ec67 100644
--- a/providers/tests/system/teradata/example_teradata_compute_cluster.py
+++ b/providers/tests/system/teradata/example_teradata_compute_cluster.py
@@ -146,13 +146,13 @@
     # [END teradata_vantage_lake_compute_cluster_howto_guide]

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/teradata/example_teradata_to_teradata_transfer.py b/providers/tests/system/teradata/example_teradata_to_teradata_transfer.py
index 7f315202606fc..a96d42816a271 100644
--- a/providers/tests/system/teradata/example_teradata_to_teradata_transfer.py
+++ b/providers/tests/system/teradata/example_teradata_to_teradata_transfer.py
@@ -148,13 +148,13 @@
     )
     # [END teradata_to_teradata_transfer_operator_howto_guide]

-    from dev.tests_common.test_utils.watcher import watcher
+    from tests_common.test_utils.watcher import watcher

     # This test needs watcher in order to properly mark success/failure
     # when "tearDown" task with trigger rule is part of the DAG
     list(dag.tasks) >> watcher()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/trino/example_gcs_to_trino.py b/providers/tests/system/trino/example_gcs_to_trino.py
index e0249101eb1a4..4c559301e64da 100644
--- a/providers/tests/system/trino/example_gcs_to_trino.py
+++ b/providers/tests/system/trino/example_gcs_to_trino.py
@@ -50,7 +50,7 @@
 # [END gcs_csv_to_trino_table]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/trino/example_trino.py b/providers/tests/system/trino/example_trino.py
index d708d9b7b8fb7..db9fef4128b93 100644
--- a/providers/tests/system/trino/example_trino.py
+++ b/providers/tests/system/trino/example_trino.py
@@ -91,7 +91,7 @@
 # [END howto_operator_trino]

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/weaviate/example_weaviate_cohere.py b/providers/tests/system/weaviate/example_weaviate_cohere.py
index 1db5e2cccb72a..11b45c2cb45be 100644
--- a/providers/tests/system/weaviate/example_weaviate_cohere.py
+++ b/providers/tests/system/weaviate/example_weaviate_cohere.py
@@ -115,7 +115,7 @@ def delete_weaviate_collections():
 example_weaviate_cohere()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/weaviate/example_weaviate_dynamic_mapping_dag.py b/providers/tests/system/weaviate/example_weaviate_dynamic_mapping_dag.py
index 38c0168a9f615..b3dfb52a60fb9 100644
--- a/providers/tests/system/weaviate/example_weaviate_dynamic_mapping_dag.py
+++ b/providers/tests/system/weaviate/example_weaviate_dynamic_mapping_dag.py
@@ -92,7 +92,7 @@ def delete_weaviate_collection(collection_name):
 example_weaviate_dynamic_mapping_dag()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/weaviate/example_weaviate_openai.py b/providers/tests/system/weaviate/example_weaviate_openai.py
index edad9058f8738..76713826035a1 100644
--- a/providers/tests/system/weaviate/example_weaviate_openai.py
+++ b/providers/tests/system/weaviate/example_weaviate_openai.py
@@ -123,7 +123,7 @@ def delete_weaviate_collection():
 example_weaviate_openai()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/weaviate/example_weaviate_operator.py b/providers/tests/system/weaviate/example_weaviate_operator.py
index 309864ad64924..734a7e5fa3945 100644
--- a/providers/tests/system/weaviate/example_weaviate_operator.py
+++ b/providers/tests/system/weaviate/example_weaviate_operator.py
@@ -297,7 +297,7 @@ def delete_weaviate_docs_collection_without_vector():
 example_weaviate_using_operator()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/weaviate/example_weaviate_using_hook.py b/providers/tests/system/weaviate/example_weaviate_using_hook.py
index 601a222f94d4e..8df913e0044fd 100644
--- a/providers/tests/system/weaviate/example_weaviate_using_hook.py
+++ b/providers/tests/system/weaviate/example_weaviate_using_hook.py
@@ -140,7 +140,7 @@ def delete_weaviate_collection_without_vector():
 example_weaviate_dag_using_hook()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/weaviate/example_weaviate_vectorizer_dag.py b/providers/tests/system/weaviate/example_weaviate_vectorizer_dag.py
index f0306177bffd4..17b7e25edb3cb 100644
--- a/providers/tests/system/weaviate/example_weaviate_vectorizer_dag.py
+++ b/providers/tests/system/weaviate/example_weaviate_vectorizer_dag.py
@@ -99,7 +99,7 @@ def delete_weaviate_collection():
 example_weaviate_vectorizer_dag()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/weaviate/example_weaviate_without_vectorizer_dag.py b/providers/tests/system/weaviate/example_weaviate_without_vectorizer_dag.py
index e2cf9e560e785..efb8d89730f6e 100644
--- a/providers/tests/system/weaviate/example_weaviate_without_vectorizer_dag.py
+++ b/providers/tests/system/weaviate/example_weaviate_without_vectorizer_dag.py
@@ -111,7 +111,7 @@ def delete_weaviate_collection():
 example_weaviate_without_vectorizer_dag()

-from dev.tests_common.test_utils.system_tests import get_test_run  # noqa: E402
+from tests_common.test_utils.system_tests import get_test_run  # noqa: E402

 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
 test_run = get_test_run(dag)
diff --git a/providers/tests/system/yandex/example_yandexcloud.py b/providers/tests/system/yandex/example_yandexcloud.py
index ab72adc14c051..85284af2642ef 100644
--- a/providers/tests/system/yandex/example_yandexcloud.py
+++ b/providers/tests/system/yandex/example_yandexcloud.py
@@ -27,14 +27,13 @@
 import yandex.cloud.dataproc.v1.job_service_pb2_grpc as job_service_grpc_pb
 import yandex.cloud.dataproc.v1.subcluster_pb2 as subcluster_pb
 from google.protobuf.json_format import MessageToDict
+from tests_common.test_utils.system_tests import get_test_env_id
 from yandexcloud.operations import OperationError

 from airflow import DAG
 from airflow.decorators import task
 from airflow.providers.yandex.hooks.yandex import YandexCloudBaseHook

get_test_env_id - ENV_ID = get_test_env_id() DAG_ID = "example_yandexcloud_hook" @@ -197,14 +196,14 @@ def delete_cluster( spark_job >> delete_task - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/yandex/example_yandexcloud_dataproc.py b/providers/tests/system/yandex/example_yandexcloud_dataproc.py index d97e3d592ef88..13cabfc0c48a8 100644 --- a/providers/tests/system/yandex/example_yandexcloud_dataproc.py +++ b/providers/tests/system/yandex/example_yandexcloud_dataproc.py @@ -19,6 +19,8 @@ import uuid from datetime import datetime +from tests_common.test_utils.system_tests import get_test_env_id + from airflow import DAG from airflow.providers.yandex.operators.dataproc import ( DataprocCreateClusterOperator, @@ -32,8 +34,6 @@ # Name of the datacenter where Dataproc cluster will be created from airflow.utils.trigger_rule import TriggerRule -from dev.tests_common.test_utils.system_tests import get_test_env_id - # should be filled with appropriate ids @@ -163,13 +163,13 @@ create_cluster >> create_mapreduce_job >> create_hive_query >> create_hive_query_from_file create_hive_query_from_file >> create_spark_job >> create_pyspark_job >> delete_cluster - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/yandex/example_yandexcloud_dataproc_lightweight.py b/providers/tests/system/yandex/example_yandexcloud_dataproc_lightweight.py index 38401a1cdcd39..fe77142f3d069 100644 --- a/providers/tests/system/yandex/example_yandexcloud_dataproc_lightweight.py +++ b/providers/tests/system/yandex/example_yandexcloud_dataproc_lightweight.py @@ -18,6 +18,8 @@ from datetime import datetime +from tests_common.test_utils.system_tests import get_test_env_id + from airflow import DAG from airflow.providers.yandex.operators.dataproc import ( DataprocCreateClusterOperator, @@ -28,8 +30,6 @@ # Name of the datacenter where Dataproc cluster will be created from airflow.utils.trigger_rule import TriggerRule -from dev.tests_common.test_utils.system_tests import get_test_env_id - # should be filled with appropriate ids @@ -71,13 +71,13 @@ ) create_spark_job >> delete_cluster - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: 
E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/yandex/example_yandexcloud_yq.py b/providers/tests/system/yandex/example_yandexcloud_yq.py index 02478825ed5e0..a83e8bd789f97 100644 --- a/providers/tests/system/yandex/example_yandexcloud_yq.py +++ b/providers/tests/system/yandex/example_yandexcloud_yq.py @@ -18,12 +18,12 @@ from datetime import datetime +from tests_common.test_utils.system_tests import get_test_env_id + from airflow.models.dag import DAG from airflow.operators.empty import EmptyOperator from airflow.providers.yandex.operators.yq import YQExecuteQueryOperator -from dev.tests_common.test_utils.system_tests import get_test_env_id - ENV_ID = get_test_env_id() DAG_ID = "example_yandexcloud_yq" @@ -40,13 +40,13 @@ yq_operator = YQExecuteQueryOperator(task_id="sample_query", sql="select 33 as d, 44 as t") yq_operator >> run_this_last - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/ydb/example_ydb.py b/providers/tests/system/ydb/example_ydb.py index d7293cb0d1b56..2f3f88cabe64b 100644 --- a/providers/tests/system/ydb/example_ydb.py +++ b/providers/tests/system/ydb/example_ydb.py @@ -119,13 +119,13 @@ def populate_pet_table_via_bulk_upsert(): ) # [END ydb_operator_howto_guide] - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/system/zendesk/example_zendesk_custom_get.py b/providers/tests/system/zendesk/example_zendesk_custom_get.py index c38d10a26abd3..a94f4e3ef6b5b 100644 --- a/providers/tests/system/zendesk/example_zendesk_custom_get.py +++ b/providers/tests/system/zendesk/example_zendesk_custom_get.py @@ -45,7 +45,7 @@ def fetch_organizations() -> list[dict]: fetch_organizations() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/providers/tests/yandex/hooks/test_yandex.py b/providers/tests/yandex/hooks/test_yandex.py index fbf77115053b2..eacb2708e944a 100644 --- a/providers/tests/yandex/hooks/test_yandex.py +++ b/providers/tests/yandex/hooks/test_yandex.py @@ -25,11 +25,11 @@ yandexcloud = pytest.importorskip("yandexcloud") +from tests_common.test_utils.config import conf_vars + from airflow.exceptions import AirflowProviderDeprecationWarning from airflow.providers.yandex.hooks.yandex import 
YandexCloudBaseHook -from dev.tests_common.test_utils.config import conf_vars - class TestYandexHook: @mock.patch("airflow.hooks.base.BaseHook.get_connection") diff --git a/providers/tests/yandex/links/test_yq.py b/providers/tests/yandex/links/test_yq.py index 2ed720e7e141c..dfd255d41c0eb 100644 --- a/providers/tests/yandex/links/test_yq.py +++ b/providers/tests/yandex/links/test_yq.py @@ -19,14 +19,13 @@ from unittest import mock import pytest +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.mock_operators import MockOperator from airflow.models.taskinstance import TaskInstance from airflow.models.xcom import XCom from airflow.providers.yandex.links.yq import YQLink -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.mock_operators import MockOperator - yandexcloud = pytest.importorskip("yandexcloud") diff --git a/providers/tests/yandex/operators/test_yq.py b/providers/tests/yandex/operators/test_yq.py index 7e586e929eaff..8c5204497230d 100644 --- a/providers/tests/yandex/operators/test_yq.py +++ b/providers/tests/yandex/operators/test_yq.py @@ -21,8 +21,7 @@ from unittest.mock import MagicMock, call, patch import pytest - -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS yandexcloud = pytest.importorskip("yandexcloud") diff --git a/pyproject.toml b/pyproject.toml index 160e8e2ce0b0c..a5dadf0e96651 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -342,7 +342,7 @@ section-order = [ # Make sure we put the "dev" imports at the end, not as a third-party module [tool.ruff.lint.isort.sections] -testing = ["dev", "providers.tests", "task_sdk.tests"] +testing = ["dev", "providers.tests", "task_sdk.tests", "test_common", "tests"] [tool.ruff.lint.extend-per-file-ignores] "airflow/__init__.py" = ["F401", "TCH004"] @@ -377,7 +377,7 @@ testing = ["dev", "providers.tests", "task_sdk.tests"] # TRY002: Use `raise Exception` "dev/perf/*" = ["TID253"] "dev/check_files.py" = ["S101"] -"dev/tests_common/*" = ["S101", "TRY002"] +"tests_common/*" = ["S101", "TRY002"] "dev/breeze/tests/*" = ["TID253", "S101", "TRY002"] "tests/*" = ["D", "TID253", "S101", "TRY002"] "docker_tests/*" = ["D", "TID253", "S101", "TRY002"] @@ -395,7 +395,7 @@ testing = ["dev", "providers.tests", "task_sdk.tests"] "helm_tests/airflow_aux/test_basic_helm_chart.py" = ["B028"] # Test compat imports banned imports to allow testing against older airflow versions -"dev/tests_common/test_utils/compat.py" = ["TID251", "F401"] +"tests_common/test_utils/compat.py" = ["TID251", "F401"] [tool.ruff.lint.flake8-tidy-imports] # Disallow all relative imports. 
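(For context on the isort-section change above: with this configuration, a core test module ends up grouped as sketched below. This is a hypothetical file, not part of the patch; the imported names are borrowed from hunks elsewhere in it, and the grouping matches the reordered imports in the test_dag_command.py and test_internal_api_command.py hunks later in this patch.)

    from __future__ import annotations

    import pytest  # third-party block comes first

    from airflow.models import Connection  # `airflow` is first-party...
    from tests_common.test_utils.db import clear_db_connections  # ...and the relocated `tests_common` now sorts with it

    from tests.models import TEST_DAGS_FOLDER  # `tests` stays in the trailing custom "testing" section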
diff --git a/scripts/ci/docker-compose/local.yml b/scripts/ci/docker-compose/local.yml index f0bce46894164..1b68dc84aa08f 100644 --- a/scripts/ci/docker-compose/local.yml +++ b/scripts/ci/docker-compose/local.yml @@ -100,6 +100,9 @@ services: - type: bind source: ../../../scripts/docker/entrypoint_ci.sh target: /entrypoint + - type: bind + source: ../../../tests_common + target: /opt/airflow/tests_common - type: bind source: ../../../tests target: /opt/airflow/tests diff --git a/scripts/ci/pre_commit/check_system_tests.py b/scripts/ci/pre_commit/check_system_tests.py index c7c305460cb3e..1eceafaab1da4 100755 --- a/scripts/ci/pre_commit/check_system_tests.py +++ b/scripts/ci/pre_commit/check_system_tests.py @@ -37,13 +37,13 @@ WATCHER_APPEND_INSTRUCTION = "list(dag.tasks) >> watcher()" PYTEST_FUNCTION = """ -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) """ PYTEST_FUNCTION_PATTERN = re.compile( - r"from dev\.tests_common\.test_utils\.system_tests import get_test_run(?: # noqa: E402)?\s+" + r"from tests_common\.test_utils\.system_tests import get_test_run(?: # noqa: E402)?\s+" r"(?:# .+\))?\s+" r"test_run = get_test_run\(dag\)" ) @@ -51,11 +51,11 @@ def _check_file(file: Path): content = file.read_text() - if "from dev.tests_common.test_utils.watcher import watcher" in content: + if "from tests_common.test_utils.watcher import watcher" in content: index = content.find(WATCHER_APPEND_INSTRUCTION) if index == -1: errors.append( - f"[red]The example {file} imports dev.tests_common.test_utils.watcher " + f"[red]The example {file} imports tests_common.test_utils.watcher " f"but does not use it properly![/]\n\n" "[yellow]Make sure you have:[/]\n\n" f" {WATCHER_APPEND_INSTRUCTION}\n\n" diff --git a/scripts/in_container/install_devel_deps.py b/scripts/in_container/install_devel_deps.py index 84f28bb2cc48b..971e11725dd8d 100755 --- a/scripts/in_container/install_devel_deps.py +++ b/scripts/in_container/install_devel_deps.py @@ -28,7 +28,7 @@ def get_devel_test_deps() -> list[str]: # Pre-install the tests_common pytest plugin/utils, in case sources aren't mounted - devel_deps: list[str] = ["./dev/tests_common"] + devel_deps: list[str] = ["./tests_common"] hatch_build_content = (AIRFLOW_SOURCES_DIR / "hatch_build.py").read_text().splitlines() store = False for line in hatch_build_content: diff --git a/task_sdk/tests/conftest.py b/task_sdk/tests/conftest.py index 7839b299eef2c..a410a1217c2f0 100644 --- a/task_sdk/tests/conftest.py +++ b/task_sdk/tests/conftest.py @@ -18,7 +18,7 @@ import pytest -pytest_plugins = "dev.tests_common.pytest_plugin" +pytest_plugins = "tests_common.pytest_plugin" @pytest.hookimpl(tryfirst=True) diff --git a/tests/always/test_connection.py b/tests/always/test_connection.py index 0f6b27c62d4ef..e289c7d2172ed 100644 --- a/tests/always/test_connection.py +++ b/tests/always/test_connection.py @@ -33,8 +33,7 @@ from airflow.models import Connection, crypto from airflow.providers.sqlite.hooks.sqlite import SqliteHook from airflow.providers_manager import HookInfo - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars ConnectionParts = namedtuple("ConnectionParts", ["conn_type", "login", "password", "host", "port", "schema"]) diff --git a/tests/always/test_example_dags.py 
b/tests/always/test_example_dags.py index 9def5f42206f8..edfc9b46f916e 100644 --- a/tests/always/test_example_dags.py +++ b/tests/always/test_example_dags.py @@ -29,8 +29,7 @@ from airflow.models import DagBag from airflow.utils import yaml - -from dev.tests_common.test_utils.asserts import assert_queries_count +from tests_common.test_utils.asserts import assert_queries_count AIRFLOW_SOURCES_ROOT = Path(__file__).resolve().parents[2] AIRFLOW_PROVIDERS_ROOT = AIRFLOW_SOURCES_ROOT / "airflow" / "providers" diff --git a/tests/always/test_secrets.py b/tests/always/test_secrets.py index a8b2296de1ccd..1e47fc13ce7c9 100644 --- a/tests/always/test_secrets.py +++ b/tests/always/test_secrets.py @@ -24,9 +24,8 @@ from airflow.configuration import ensure_secrets_loaded, initialize_secrets_backends from airflow.models import Connection, Variable from airflow.secrets.cache import SecretCache - -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_variables +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_variables class TestConnectionsFromSecrets: diff --git a/tests/always/test_secrets_backends.py b/tests/always/test_secrets_backends.py index c5093d2e562a7..a74c7ac4e95f0 100644 --- a/tests/always/test_secrets_backends.py +++ b/tests/always/test_secrets_backends.py @@ -28,8 +28,7 @@ from airflow.secrets.environment_variables import EnvironmentVariablesBackend from airflow.secrets.metastore import MetastoreBackend from airflow.utils.session import create_session - -from dev.tests_common.test_utils.db import clear_db_connections, clear_db_variables +from tests_common.test_utils.db import clear_db_connections, clear_db_variables pytestmark = pytest.mark.db_test diff --git a/tests/always/test_secrets_local_filesystem.py b/tests/always/test_secrets_local_filesystem.py index 40472ec64d5f5..0a84120d2d087 100644 --- a/tests/always/test_secrets_local_filesystem.py +++ b/tests/always/test_secrets_local_filesystem.py @@ -29,8 +29,7 @@ from airflow.models import Variable from airflow.secrets import local_filesystem from airflow.secrets.local_filesystem import LocalFilesystemBackend - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars @contextmanager diff --git a/tests/api_connexion/conftest.py b/tests/api_connexion/conftest.py index 8ab76fbb76298..0d91a8dc34a76 100644 --- a/tests/api_connexion/conftest.py +++ b/tests/api_connexion/conftest.py @@ -19,9 +19,8 @@ import pytest from airflow.www import app - -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.decorators import dont_initialize_flask_app_submodules @pytest.fixture(scope="session") @@ -39,7 +38,7 @@ def minimal_app_for_api(): def factory(): with conf_vars( { - ("api", "auth_backends"): "dev.tests_common.test_utils.remote_user_api_auth_backend", + ("api", "auth_backends"): "tests_common.test_utils.remote_user_api_auth_backend", ( "core", "auth_manager", diff --git a/tests/api_connexion/endpoints/test_asset_endpoint.py b/tests/api_connexion/endpoints/test_asset_endpoint.py index 39b0129c3317d..e02844561f8c4 100644 --- a/tests/api_connexion/endpoints/test_asset_endpoint.py +++ b/tests/api_connexion/endpoints/test_asset_endpoint.py @@ -36,12 +36,11 @@ from airflow.utils import timezone from airflow.utils.session 
import provide_session from airflow.utils.types import DagRunType - -from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from dev.tests_common.test_utils.asserts import assert_queries_count -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_assets, clear_db_runs -from dev.tests_common.test_utils.www import _check_last_log +from tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from tests_common.test_utils.asserts import assert_queries_count +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_assets, clear_db_runs +from tests_common.test_utils.www import _check_last_log pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_backfill_endpoint.py b/tests/api_connexion/endpoints/test_backfill_endpoint.py index 5bb959cabaf56..bc5a214654368 100644 --- a/tests/api_connexion/endpoints/test_backfill_endpoint.py +++ b/tests/api_connexion/endpoints/test_backfill_endpoint.py @@ -29,9 +29,8 @@ from airflow.operators.empty import EmptyOperator from airflow.utils import timezone from airflow.utils.session import provide_session - -from dev.tests_common.test_utils.api_connexion_utils import create_user, delete_user -from dev.tests_common.test_utils.db import ( +from tests_common.test_utils.api_connexion_utils import create_user, delete_user +from tests_common.test_utils.db import ( clear_db_backfills, clear_db_dags, clear_db_runs, diff --git a/tests/api_connexion/endpoints/test_config_endpoint.py b/tests/api_connexion/endpoints/test_config_endpoint.py index c46a6571c23b9..86046ec3f37c3 100644 --- a/tests/api_connexion/endpoints/test_config_endpoint.py +++ b/tests/api_connexion/endpoints/test_config_endpoint.py @@ -21,8 +21,8 @@ import pytest -from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_connection_endpoint.py b/tests/api_connexion/endpoints/test_connection_endpoint.py index fb2d53074cd87..b82fac609bd9e 100644 --- a/tests/api_connexion/endpoints/test_connection_endpoint.py +++ b/tests/api_connexion/endpoints/test_connection_endpoint.py @@ -25,11 +25,10 @@ from airflow.models import Connection from airflow.secrets.environment_variables import CONN_ENV_PREFIX from airflow.utils.session import provide_session - -from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_connections -from dev.tests_common.test_utils.www import _check_last_log +from tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_connections +from tests_common.test_utils.www import _check_last_log pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_dag_endpoint.py b/tests/api_connexion/endpoints/test_dag_endpoint.py index 
86e0a3b4b3ec6..fd3b8b7d90d17 100644 --- a/tests/api_connexion/endpoints/test_dag_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_endpoint.py @@ -30,11 +30,10 @@ from airflow.operators.empty import EmptyOperator from airflow.utils.session import provide_session from airflow.utils.state import TaskInstanceState - -from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags -from dev.tests_common.test_utils.www import _check_last_log +from tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags +from tests_common.test_utils.www import _check_last_log pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_dag_parsing.py b/tests/api_connexion/endpoints/test_dag_parsing.py index 0fca8ee63c973..53648150c2c01 100644 --- a/tests/api_connexion/endpoints/test_dag_parsing.py +++ b/tests/api_connexion/endpoints/test_dag_parsing.py @@ -24,9 +24,8 @@ from airflow.models import DagBag from airflow.models.dagbag import DagPriorityParsingRequest - -from dev.tests_common.test_utils.api_connexion_utils import create_user, delete_user -from dev.tests_common.test_utils.db import clear_db_dag_parsing_requests +from tests_common.test_utils.api_connexion_utils import create_user, delete_user +from tests_common.test_utils.db import clear_db_dag_parsing_requests pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_dag_run_endpoint.py b/tests/api_connexion/endpoints/test_dag_run_endpoint.py index b881f5ea98638..bad9ff1481c4d 100644 --- a/tests/api_connexion/endpoints/test_dag_run_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_run_endpoint.py @@ -34,12 +34,11 @@ from airflow.utils.session import create_session, provide_session from airflow.utils.state import DagRunState, State from airflow.utils.types import DagRunType - -from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags -from dev.tests_common.test_utils.www import _check_last_log +from tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags +from tests_common.test_utils.www import _check_last_log if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/api_connexion/endpoints/test_dag_source_endpoint.py b/tests/api_connexion/endpoints/test_dag_source_endpoint.py index 80f32dc715a3c..8a5c91ae670a3 100644 --- a/tests/api_connexion/endpoints/test_dag_source_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_source_endpoint.py @@ -23,9 +23,8 @@ import pytest from airflow.models import DagBag - -from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user 
-from dev.tests_common.test_utils.db import clear_db_dag_code, clear_db_dags, clear_db_serialized_dags +from tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from tests_common.test_utils.db import clear_db_dag_code, clear_db_dags, clear_db_serialized_dags pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_dag_stats_endpoint.py b/tests/api_connexion/endpoints/test_dag_stats_endpoint.py index fe563b9444030..179662cd8f4ed 100644 --- a/tests/api_connexion/endpoints/test_dag_stats_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_stats_endpoint.py @@ -26,9 +26,8 @@ from airflow.utils.session import create_session from airflow.utils.state import DagRunState from airflow.utils.types import DagRunType - -from dev.tests_common.test_utils.api_connexion_utils import create_user, delete_user -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags +from tests_common.test_utils.api_connexion_utils import create_user, delete_user +from tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_dag_warning_endpoint.py b/tests/api_connexion/endpoints/test_dag_warning_endpoint.py index b9c00991413bb..f3521ed883c73 100644 --- a/tests/api_connexion/endpoints/test_dag_warning_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_warning_endpoint.py @@ -23,9 +23,8 @@ from airflow.models.dag import DagModel from airflow.models.dagwarning import DagWarning from airflow.utils.session import create_session - -from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from dev.tests_common.test_utils.db import clear_db_dag_warnings, clear_db_dags +from tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from tests_common.test_utils.db import clear_db_dag_warnings, clear_db_dags pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_event_log_endpoint.py b/tests/api_connexion/endpoints/test_event_log_endpoint.py index 42a610890ede2..bcd668c93cce6 100644 --- a/tests/api_connexion/endpoints/test_event_log_endpoint.py +++ b/tests/api_connexion/endpoints/test_event_log_endpoint.py @@ -21,10 +21,9 @@ from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP from airflow.models import Log from airflow.utils import timezone - -from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_logs +from tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_logs pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_extra_link_endpoint.py b/tests/api_connexion/endpoints/test_extra_link_endpoint.py index 373571a42ee47..30b8234cd746a 100644 --- a/tests/api_connexion/endpoints/test_extra_link_endpoint.py +++ b/tests/api_connexion/endpoints/test_extra_link_endpoint.py @@ -30,12 +30,11 @@ from airflow.utils import timezone from airflow.utils.state import DagRunState from airflow.utils.types import DagRunType - 
-from dev.tests_common.test_utils.api_connexion_utils import create_user, delete_user -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, BaseOperatorLink -from dev.tests_common.test_utils.db import clear_db_runs, clear_db_xcom -from dev.tests_common.test_utils.mock_operators import CustomOperator -from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager +from tests_common.test_utils.api_connexion_utils import create_user, delete_user +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, BaseOperatorLink +from tests_common.test_utils.db import clear_db_runs, clear_db_xcom +from tests_common.test_utils.mock_operators import CustomOperator +from tests_common.test_utils.mock_plugins import mock_plugin_manager if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/api_connexion/endpoints/test_import_error_endpoint.py b/tests/api_connexion/endpoints/test_import_error_endpoint.py index f4f7d03b323f2..0e2ea9f97200c 100644 --- a/tests/api_connexion/endpoints/test_import_error_endpoint.py +++ b/tests/api_connexion/endpoints/test_import_error_endpoint.py @@ -23,11 +23,10 @@ from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP from airflow.utils import timezone from airflow.utils.session import provide_session - -from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from dev.tests_common.test_utils.compat import ParseImportError -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_import_errors +from tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from tests_common.test_utils.compat import ParseImportError +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_dags, clear_db_import_errors pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_log_endpoint.py b/tests/api_connexion/endpoints/test_log_endpoint.py index 21d4e8feb83ad..ef542a74d7a8f 100644 --- a/tests/api_connexion/endpoints/test_log_endpoint.py +++ b/tests/api_connexion/endpoints/test_log_endpoint.py @@ -32,9 +32,8 @@ from airflow.operators.empty import EmptyOperator from airflow.utils import timezone from airflow.utils.types import DagRunType - -from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from dev.tests_common.test_utils.db import clear_db_runs +from tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from tests_common.test_utils.db import clear_db_runs pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_mapped_task_instance_endpoint.py b/tests/api_connexion/endpoints/test_mapped_task_instance_endpoint.py index e41bbcc6c2e88..843de41f1b401 100644 --- a/tests/api_connexion/endpoints/test_mapped_task_instance_endpoint.py +++ b/tests/api_connexion/endpoints/test_mapped_task_instance_endpoint.py @@ -32,10 +32,9 @@ from airflow.utils.session import provide_session from airflow.utils.state import State, TaskInstanceState from airflow.utils.timezone import datetime - -from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from dev.tests_common.test_utils.db import clear_db_runs, clear_db_sla_miss, clear_rendered_ti_fields -from dev.tests_common.test_utils.mock_operators 
import MockOperator +from tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from tests_common.test_utils.db import clear_db_runs, clear_db_sla_miss, clear_rendered_ti_fields +from tests_common.test_utils.mock_operators import MockOperator pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_plugin_endpoint.py b/tests/api_connexion/endpoints/test_plugin_endpoint.py index 2831af06bf3d9..b04ec7b7d5823 100644 --- a/tests/api_connexion/endpoints/test_plugin_endpoint.py +++ b/tests/api_connexion/endpoints/test_plugin_endpoint.py @@ -28,11 +28,10 @@ from airflow.ti_deps.deps.base_ti_dep import BaseTIDep from airflow.timetables.base import Timetable from airflow.utils.module_loading import qualname - -from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from dev.tests_common.test_utils.compat import BaseOperatorLink -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager +from tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from tests_common.test_utils.compat import BaseOperatorLink +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.mock_plugins import mock_plugin_manager pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_pool_endpoint.py b/tests/api_connexion/endpoints/test_pool_endpoint.py index 455c417bd2581..d19dbf836d025 100644 --- a/tests/api_connexion/endpoints/test_pool_endpoint.py +++ b/tests/api_connexion/endpoints/test_pool_endpoint.py @@ -21,11 +21,10 @@ from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP from airflow.models.pool import Pool from airflow.utils.session import provide_session - -from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_pools -from dev.tests_common.test_utils.www import _check_last_log +from tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_pools +from tests_common.test_utils.www import _check_last_log pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_provider_endpoint.py b/tests/api_connexion/endpoints/test_provider_endpoint.py index 92e14887ec038..c526aabe8c005 100644 --- a/tests/api_connexion/endpoints/test_provider_endpoint.py +++ b/tests/api_connexion/endpoints/test_provider_endpoint.py @@ -21,8 +21,7 @@ import pytest from airflow.providers_manager import ProviderInfo - -from dev.tests_common.test_utils.api_connexion_utils import create_user, delete_user +from tests_common.test_utils.api_connexion_utils import create_user, delete_user pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_task_endpoint.py b/tests/api_connexion/endpoints/test_task_endpoint.py index 60237946a82ca..567394208510b 100644 --- a/tests/api_connexion/endpoints/test_task_endpoint.py +++ b/tests/api_connexion/endpoints/test_task_endpoint.py @@ -27,9 +27,8 @@ from airflow.models.expandinput import EXPAND_INPUT_EMPTY from airflow.models.serialized_dag 
import SerializedDagModel from airflow.operators.empty import EmptyOperator - -from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags +from tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_task_instance_endpoint.py b/tests/api_connexion/endpoints/test_task_instance_endpoint.py index 809c2fab9aa2d..bec52292fbe46 100644 --- a/tests/api_connexion/endpoints/test_task_instance_endpoint.py +++ b/tests/api_connexion/endpoints/test_task_instance_endpoint.py @@ -35,10 +35,9 @@ from airflow.utils.state import State from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType - -from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from dev.tests_common.test_utils.db import clear_db_runs, clear_db_sla_miss, clear_rendered_ti_fields -from dev.tests_common.test_utils.www import _check_last_log +from tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from tests_common.test_utils.db import clear_db_runs, clear_db_sla_miss, clear_rendered_ti_fields +from tests_common.test_utils.www import _check_last_log pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_variable_endpoint.py b/tests/api_connexion/endpoints/test_variable_endpoint.py index e4302636184d3..640251ebc14f3 100644 --- a/tests/api_connexion/endpoints/test_variable_endpoint.py +++ b/tests/api_connexion/endpoints/test_variable_endpoint.py @@ -22,11 +22,10 @@ from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP from airflow.models import Variable - -from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_variables -from dev.tests_common.test_utils.www import _check_last_log +from tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_variables +from tests_common.test_utils.www import _check_last_log pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/endpoints/test_xcom_endpoint.py b/tests/api_connexion/endpoints/test_xcom_endpoint.py index 0293e672c47d2..12d56ae9894ea 100644 --- a/tests/api_connexion/endpoints/test_xcom_endpoint.py +++ b/tests/api_connexion/endpoints/test_xcom_endpoint.py @@ -30,10 +30,9 @@ from airflow.utils.session import create_session from airflow.utils.timezone import utcnow from airflow.utils.types import DagRunType - -from dev.tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_xcom +from tests_common.test_utils.api_connexion_utils import assert_401, create_user, delete_user +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_xcom pytestmark 
= [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/schemas/test_connection_schema.py b/tests/api_connexion/schemas/test_connection_schema.py index d7a0de4e89f05..e52b573875138 100644 --- a/tests/api_connexion/schemas/test_connection_schema.py +++ b/tests/api_connexion/schemas/test_connection_schema.py @@ -30,8 +30,7 @@ ) from airflow.models import Connection from airflow.utils.session import create_session, provide_session - -from dev.tests_common.test_utils.db import clear_db_connections +from tests_common.test_utils.db import clear_db_connections pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/schemas/test_dag_run_schema.py b/tests/api_connexion/schemas/test_dag_run_schema.py index b874ade4442ff..929a8af451112 100644 --- a/tests/api_connexion/schemas/test_dag_run_schema.py +++ b/tests/api_connexion/schemas/test_dag_run_schema.py @@ -29,9 +29,8 @@ from airflow.utils import timezone from airflow.utils.session import provide_session from airflow.utils.types import DagRunType - -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.db import clear_db_runs +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.db import clear_db_runs if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/api_connexion/schemas/test_dataset_schema.py b/tests/api_connexion/schemas/test_dataset_schema.py index 769de6f1b10eb..669b87f6cf636 100644 --- a/tests/api_connexion/schemas/test_dataset_schema.py +++ b/tests/api_connexion/schemas/test_dataset_schema.py @@ -30,8 +30,7 @@ from airflow.assets import Asset from airflow.models.asset import AssetAliasModel, AssetEvent, AssetModel from airflow.operators.empty import EmptyOperator - -from dev.tests_common.test_utils.db import clear_db_assets, clear_db_dags +from tests_common.test_utils.db import clear_db_assets, clear_db_dags pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/schemas/test_error_schema.py b/tests/api_connexion/schemas/test_error_schema.py index 5c358f2e2c374..5f31dfe2f1460 100644 --- a/tests/api_connexion/schemas/test_error_schema.py +++ b/tests/api_connexion/schemas/test_error_schema.py @@ -25,9 +25,8 @@ ) from airflow.utils import timezone from airflow.utils.session import provide_session - -from dev.tests_common.test_utils.compat import ParseImportError -from dev.tests_common.test_utils.db import clear_db_import_errors +from tests_common.test_utils.compat import ParseImportError +from tests_common.test_utils.db import clear_db_import_errors pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/schemas/test_plugin_schema.py b/tests/api_connexion/schemas/test_plugin_schema.py index ee079ce664348..df550a8301bdd 100644 --- a/tests/api_connexion/schemas/test_plugin_schema.py +++ b/tests/api_connexion/schemas/test_plugin_schema.py @@ -28,8 +28,7 @@ ) from airflow.hooks.base import BaseHook from airflow.plugins_manager import AirflowPlugin - -from dev.tests_common.test_utils.compat import BaseOperatorLink +from tests_common.test_utils.compat import BaseOperatorLink class PluginHook(BaseHook): ... 
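(Aside: many hunks above switch to `tests_common.test_utils.compat`, whose role, per the `TID251`/`F401` ignores kept for it in pyproject.toml, is to let the same tests import symbols across Airflow versions. Below is a minimal sketch of that shim pattern, assuming `packaging` is installed; it is illustrative only, not the module's actual contents.)

    from __future__ import annotations

    from packaging.version import Version

    from airflow import __version__ as AIRFLOW_VERSION

    # Version gate that tests use to branch on Airflow 3 behavior.
    AIRFLOW_V_3_0_PLUS = Version(Version(AIRFLOW_VERSION).base_version) >= Version("3.0.0")

    try:
        # Newer Airflow exposes the class from its own module.
        from airflow.models.baseoperatorlink import BaseOperatorLink
    except ImportError:
        # Older releases kept it next to BaseOperator.
        from airflow.models.baseoperator import BaseOperatorLink

Test modules then import `AIRFLOW_V_3_0_PLUS` and `BaseOperatorLink` from this single location, as the hunks above do, instead of repeating the version branching in every file.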
diff --git a/tests/api_connexion/schemas/test_pool_schemas.py b/tests/api_connexion/schemas/test_pool_schemas.py index 23788426c5018..3e5f3d7f02904 100644 --- a/tests/api_connexion/schemas/test_pool_schemas.py +++ b/tests/api_connexion/schemas/test_pool_schemas.py @@ -21,8 +21,7 @@ from airflow.api_connexion.schemas.pool_schema import PoolCollection, pool_collection_schema, pool_schema from airflow.models.pool import Pool from airflow.utils.session import provide_session - -from dev.tests_common.test_utils.db import clear_db_pools +from tests_common.test_utils.db import clear_db_pools pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/schemas/test_xcom_schema.py b/tests/api_connexion/schemas/test_xcom_schema.py index 06c36c31b7767..366d707878b9c 100644 --- a/tests/api_connexion/schemas/test_xcom_schema.py +++ b/tests/api_connexion/schemas/test_xcom_schema.py @@ -30,8 +30,7 @@ from airflow.models import DagRun, XCom from airflow.utils.dates import parse_execution_date from airflow.utils.session import create_session - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/test_auth.py b/tests/api_connexion/test_auth.py index e568d618ad662..65e98c18c974d 100644 --- a/tests/api_connexion/test_auth.py +++ b/tests/api_connexion/test_auth.py @@ -22,10 +22,9 @@ from airflow.auth.managers.simple.simple_auth_manager import SimpleAuthManager from airflow.auth.managers.simple.user import SimpleAuthManagerUser - -from dev.tests_common.test_utils.api_connexion_utils import assert_401 -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_pools +from tests_common.test_utils.api_connexion_utils import assert_401 +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_pools pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_connexion/test_parameters.py b/tests/api_connexion/test_parameters.py index 57c9c05415f65..26fdcfa7aeca0 100644 --- a/tests/api_connexion/test_parameters.py +++ b/tests/api_connexion/test_parameters.py @@ -29,8 +29,7 @@ validate_istimezone, ) from airflow.utils import timezone - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars class TestValidateIsTimezone: diff --git a/tests/api_connexion/test_security.py b/tests/api_connexion/test_security.py index 2051cf613510e..bebf2ec340a60 100644 --- a/tests/api_connexion/test_security.py +++ b/tests/api_connexion/test_security.py @@ -18,7 +18,7 @@ import pytest -from dev.tests_common.test_utils.api_connexion_utils import create_user, delete_user +from tests_common.test_utils.api_connexion_utils import create_user, delete_user pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/api_fastapi/views/public/test_connections.py b/tests/api_fastapi/views/public/test_connections.py index a5cb301a43136..a0da0d2b9b28e 100644 --- a/tests/api_fastapi/views/public/test_connections.py +++ b/tests/api_fastapi/views/public/test_connections.py @@ -20,8 +20,7 @@ from airflow.models import Connection from airflow.utils.session import provide_session - -from dev.tests_common.test_utils.db import clear_db_connections +from tests_common.test_utils.db import clear_db_connections 
pytestmark = pytest.mark.db_test diff --git a/tests/api_fastapi/views/public/test_dag_run.py b/tests/api_fastapi/views/public/test_dag_run.py index 377aca4553d86..3c60b307bbbd2 100644 --- a/tests/api_fastapi/views/public/test_dag_run.py +++ b/tests/api_fastapi/views/public/test_dag_run.py @@ -25,8 +25,7 @@ from airflow.utils.session import provide_session from airflow.utils.state import DagRunState from airflow.utils.types import DagRunTriggeredByType, DagRunType - -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags +from tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags pytestmark = pytest.mark.db_test diff --git a/tests/api_fastapi/views/public/test_dags.py b/tests/api_fastapi/views/public/test_dags.py index 8bc10407e6287..cd1809cb70d3d 100644 --- a/tests/api_fastapi/views/public/test_dags.py +++ b/tests/api_fastapi/views/public/test_dags.py @@ -27,8 +27,7 @@ from airflow.utils.session import provide_session from airflow.utils.state import DagRunState from airflow.utils.types import DagRunType - -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags +from tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags pytestmark = pytest.mark.db_test diff --git a/tests/api_fastapi/views/public/test_variables.py b/tests/api_fastapi/views/public/test_variables.py index 58a09538a971a..21f86c428b397 100644 --- a/tests/api_fastapi/views/public/test_variables.py +++ b/tests/api_fastapi/views/public/test_variables.py @@ -20,8 +20,7 @@ from airflow.models.variable import Variable from airflow.utils.session import provide_session - -from dev.tests_common.test_utils.db import clear_db_variables +from tests_common.test_utils.db import clear_db_variables pytestmark = pytest.mark.db_test diff --git a/tests/api_fastapi/views/ui/test_assets.py b/tests/api_fastapi/views/ui/test_assets.py index d0e93c709b076..d06631ec88b59 100644 --- a/tests/api_fastapi/views/ui/test_assets.py +++ b/tests/api_fastapi/views/ui/test_assets.py @@ -20,8 +20,7 @@ from airflow.assets import Asset from airflow.operators.empty import EmptyOperator - -from dev.tests_common.test_utils.db import initial_db_init +from tests_common.test_utils.db import initial_db_init pytestmark = pytest.mark.db_test diff --git a/tests/api_fastapi/views/ui/test_dashboard.py b/tests/api_fastapi/views/ui/test_dashboard.py index bee683535704f..f4a061f1dd0ff 100644 --- a/tests/api_fastapi/views/ui/test_dashboard.py +++ b/tests/api_fastapi/views/ui/test_dashboard.py @@ -26,8 +26,7 @@ from airflow.operators.empty import EmptyOperator from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.types import DagRunType - -from dev.tests_common.test_utils.db import clear_db_runs +from tests_common.test_utils.db import clear_db_runs pytestmark = pytest.mark.db_test diff --git a/tests/api_internal/endpoints/test_rpc_api_endpoint.py b/tests/api_internal/endpoints/test_rpc_api_endpoint.py index 364b0f44eaf27..72ce2afe4c19f 100644 --- a/tests/api_internal/endpoints/test_rpc_api_endpoint.py +++ b/tests/api_internal/endpoints/test_rpc_api_endpoint.py @@ -34,9 +34,8 @@ from airflow.utils.jwt_signer import JWTSigner from airflow.utils.state import State from airflow.www import app - -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules +from tests_common.test_utils.config import conf_vars +from 
tests_common.test_utils.decorators import dont_initialize_flask_app_submodules # Note: Sounds a bit strange to disable internal API tests in isolation mode but... # As long as the test is modelled to run its own internal API endpoints, it is conflicting diff --git a/tests/api_internal/test_internal_api_call.py b/tests/api_internal/test_internal_api_call.py index 02ae2d9f55125..880e1e89b0388 100644 --- a/tests/api_internal/test_internal_api_call.py +++ b/tests/api_internal/test_internal_api_call.py @@ -34,8 +34,7 @@ from airflow.serialization.serialized_objects import BaseSerialization from airflow.settings import _ENABLE_AIP_44 from airflow.utils.state import State - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars if TYPE_CHECKING: from airflow.serialization.pydantic.taskinstance import TaskInstancePydantic diff --git a/tests/assets/test_manager.py b/tests/assets/test_manager.py index cd4563d9a9c4e..f2dcc88b8338b 100644 --- a/tests/assets/test_manager.py +++ b/tests/assets/test_manager.py @@ -38,6 +38,7 @@ from airflow.models.dag import DagModel from airflow.models.dagbag import DagPriorityParsingRequest from airflow.serialization.pydantic.taskinstance import TaskInstancePydantic + from tests.listeners import asset_listener pytestmark = pytest.mark.db_test @@ -48,7 +49,7 @@ @pytest.fixture def clear_assets(): - from dev.tests_common.test_utils.db import clear_db_assets + from tests_common.test_utils.db import clear_db_assets clear_db_assets() yield diff --git a/tests/assets/tests_asset.py b/tests/assets/tests_asset.py index 10d91e7a00fb7..0bcfb83e88a79 100644 --- a/tests/assets/tests_asset.py +++ b/tests/assets/tests_asset.py @@ -43,7 +43,7 @@ @pytest.fixture def clear_assets(): - from dev.tests_common.test_utils.db import clear_db_assets + from tests_common.test_utils.db import clear_db_assets clear_db_assets() yield diff --git a/tests/auth/managers/simple/views/test_auth.py b/tests/auth/managers/simple/views/test_auth.py index f61a9278f79a5..49f6237fc7e44 100644 --- a/tests/auth/managers/simple/views/test_auth.py +++ b/tests/auth/managers/simple/views/test_auth.py @@ -23,8 +23,7 @@ from airflow.auth.managers.simple.simple_auth_manager import SimpleAuthManager from airflow.www import app as application - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars @pytest.fixture diff --git a/tests/cli/commands/test_backfill_command.py b/tests/cli/commands/test_backfill_command.py index c01e1e4f9d07b..e798436f24445 100644 --- a/tests/cli/commands/test_backfill_command.py +++ b/tests/cli/commands/test_backfill_command.py @@ -28,8 +28,7 @@ from airflow.cli import cli_parser from airflow.models import DagBag from airflow.utils import timezone - -from dev.tests_common.test_utils.db import clear_db_backfills, clear_db_dags, clear_db_runs +from tests_common.test_utils.db import clear_db_backfills, clear_db_dags, clear_db_runs DEFAULT_DATE = timezone.make_aware(datetime(2015, 1, 1), timezone=timezone.utc) if pendulum.__version__.startswith("3"): diff --git a/tests/cli/commands/test_celery_command.py b/tests/cli/commands/test_celery_command.py index c29d835d8ee07..7e20a9c933f94 100644 --- a/tests/cli/commands/test_celery_command.py +++ b/tests/cli/commands/test_celery_command.py @@ -30,8 +30,7 @@ from airflow.cli.commands import celery_command from airflow.configuration import conf from airflow.executors import executor_loader - -from dev.tests_common.test_utils.config import 
conf_vars +from tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_config_command.py b/tests/cli/commands/test_config_command.py index 90d38fa27ad03..3202990664285 100644 --- a/tests/cli/commands/test_config_command.py +++ b/tests/cli/commands/test_config_command.py @@ -22,8 +22,7 @@ from airflow.cli import cli_parser from airflow.cli.commands import config_command - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars STATSD_CONFIG_BEGIN_WITH = "# `StatsD <https://github.com/statsd/statsd>`" diff --git a/tests/cli/commands/test_connection_command.py b/tests/cli/commands/test_connection_command.py index 06578eb1492c1..a04775433f468 100644 --- a/tests/cli/commands/test_connection_command.py +++ b/tests/cli/commands/test_connection_command.py @@ -33,8 +33,7 @@ from airflow.models import Connection from airflow.utils.db import merge_conn from airflow.utils.session import create_session - -from dev.tests_common.test_utils.db import clear_db_connections +from tests_common.test_utils.db import clear_db_connections pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_dag_command.py b/tests/cli/commands/test_dag_command.py index e9d10a2e33c18..fe48ec5f357cb 100644 --- a/tests/cli/commands/test_dag_command.py +++ b/tests/cli/commands/test_dag_command.py @@ -46,10 +46,10 @@ from airflow.utils.session import create_session from airflow.utils.state import DagRunState from airflow.utils.types import DagRunType -from tests.models import TEST_DAGS_FOLDER +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_dags, clear_db_runs -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs +from tests.models import TEST_DAGS_FOLDER DEFAULT_DATE = timezone.make_aware(datetime(2015, 1, 1), timezone=timezone.utc) if pendulum.__version__.startswith("3"): diff --git a/tests/cli/commands/test_dag_processor_command.py b/tests/cli/commands/test_dag_processor_command.py index 643710ce1e415..71a30dcd7d89b 100644 --- a/tests/cli/commands/test_dag_processor_command.py +++ b/tests/cli/commands/test_dag_processor_command.py @@ -24,8 +24,7 @@ from airflow.cli import cli_parser from airflow.cli.commands import dag_processor_command from airflow.configuration import conf - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_fastapi_api_command.py b/tests/cli/commands/test_fastapi_api_command.py index 2de4b729bfb0e..b1a02b3b186e1 100644 --- a/tests/cli/commands/test_fastapi_api_command.py +++ b/tests/cli/commands/test_fastapi_api_command.py @@ -27,6 +27,7 @@ from airflow.cli.commands import fastapi_api_command from airflow.exceptions import AirflowConfigException + from tests.cli.commands._common_cli_classes import _CommonCLIGunicornTestClass console = Console(width=400, color_system="standard") diff --git a/tests/cli/commands/test_info_command.py b/tests/cli/commands/test_info_command.py index c6bea2d8a5bb8..ca5db5819c04b 100644 --- a/tests/cli/commands/test_info_command.py +++ b/tests/cli/commands/test_info_command.py @@ -32,8 +32,7 @@ from airflow.config_templates import airflow_local_settings from airflow.logging_config import
configure_logging from airflow.version import version as airflow_version - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars def capture_show_output(instance): diff --git a/tests/cli/commands/test_internal_api_command.py b/tests/cli/commands/test_internal_api_command.py index 11123c7c211a1..f3dd664d1a273 100644 --- a/tests/cli/commands/test_internal_api_command.py +++ b/tests/cli/commands/test_internal_api_command.py @@ -31,9 +31,9 @@ from airflow.cli.commands import internal_api_command from airflow.cli.commands.internal_api_command import GunicornMonitor from airflow.settings import _ENABLE_AIP_44 -from tests.cli.commands._common_cli_classes import _CommonCLIGunicornTestClass +from tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.config import conf_vars +from tests.cli.commands._common_cli_classes import _CommonCLIGunicornTestClass console = Console(width=400, color_system="standard") diff --git a/tests/cli/commands/test_jobs_command.py b/tests/cli/commands/test_jobs_command.py index 61540a68c51c6..1c52aa71334e4 100644 --- a/tests/cli/commands/test_jobs_command.py +++ b/tests/cli/commands/test_jobs_command.py @@ -27,8 +27,7 @@ from airflow.jobs.scheduler_job_runner import SchedulerJobRunner from airflow.utils.session import create_session from airflow.utils.state import JobState, State - -from dev.tests_common.test_utils.db import clear_db_jobs +from tests_common.test_utils.db import clear_db_jobs @pytest.mark.skip_if_database_isolation_mode diff --git a/tests/cli/commands/test_kerberos_command.py b/tests/cli/commands/test_kerberos_command.py index 4a5e73ece788f..5d2f2e513bcc5 100644 --- a/tests/cli/commands/test_kerberos_command.py +++ b/tests/cli/commands/test_kerberos_command.py @@ -23,8 +23,7 @@ from airflow.cli import cli_parser from airflow.cli.commands import kerberos_command from airflow.security.kerberos import KerberosMode - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_kubernetes_command.py b/tests/cli/commands/test_kubernetes_command.py index 1e53bdc77d8d8..dbde7a89664ed 100644 --- a/tests/cli/commands/test_kubernetes_command.py +++ b/tests/cli/commands/test_kubernetes_command.py @@ -28,8 +28,7 @@ from airflow.cli import cli_parser from airflow.cli.commands import kubernetes_command from airflow.executors import executor_loader - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_plugins_command.py b/tests/cli/commands/test_plugins_command.py index 703b6879d7a3a..00d6cee5a0b5d 100644 --- a/tests/cli/commands/test_plugins_command.py +++ b/tests/cli/commands/test_plugins_command.py @@ -28,9 +28,9 @@ from airflow.hooks.base import BaseHook from airflow.listeners.listener import get_listener_manager from airflow.plugins_manager import AirflowPlugin -from tests.plugins.test_plugin import AirflowTestPlugin as ComplexAirflowPlugin +from tests_common.test_utils.mock_plugins import mock_plugin_manager -from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager +from tests.plugins.test_plugin import AirflowTestPlugin as ComplexAirflowPlugin pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] @@ 
-89,15 +89,15 @@ def test_should_display_one_plugin(self): } ], "global_operator_extra_links": [ - "", - "", + "", + "", ], "timetables": ["tests.plugins.test_plugin.CustomCronDataIntervalTimetable"], "operator_extra_links": [ - "", - "", - "", - "", + "", + "", + "", + "", ], "hooks": ["tests.plugins.test_plugin.PluginHook"], "listeners": [ diff --git a/tests/cli/commands/test_rotate_fernet_key_command.py b/tests/cli/commands/test_rotate_fernet_key_command.py index d638c2010e187..473c911d9a10c 100644 --- a/tests/cli/commands/test_rotate_fernet_key_command.py +++ b/tests/cli/commands/test_rotate_fernet_key_command.py @@ -26,9 +26,8 @@ from airflow.hooks.base import BaseHook from airflow.models import Connection, Variable from airflow.utils.session import provide_session - -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_connections, clear_db_variables +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_connections, clear_db_variables pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_scheduler_command.py b/tests/cli/commands/test_scheduler_command.py index 8fce5d6e289b3..a6adb514f203b 100644 --- a/tests/cli/commands/test_scheduler_command.py +++ b/tests/cli/commands/test_scheduler_command.py @@ -29,8 +29,7 @@ from airflow.executors import executor_loader from airflow.utils.scheduler_health import HealthServer, serve_health_check from airflow.utils.serve_logs import serve_logs - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_task_command.py b/tests/cli/commands/test_task_command.py index fb512792eb324..823172cffc555 100644 --- a/tests/cli/commands/test_task_command.py +++ b/tests/cli/commands/test_task_command.py @@ -51,10 +51,9 @@ from airflow.utils.session import create_session from airflow.utils.state import State, TaskInstanceState from airflow.utils.types import DagRunType - -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_pools, clear_db_runs +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_pools, clear_db_runs if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/cli/commands/test_variable_command.py b/tests/cli/commands/test_variable_command.py index d35184c73c80a..630e7d321d3e0 100644 --- a/tests/cli/commands/test_variable_command.py +++ b/tests/cli/commands/test_variable_command.py @@ -29,8 +29,7 @@ from airflow.cli.commands import variable_command from airflow.models import Variable from airflow.utils.session import create_session - -from dev.tests_common.test_utils.db import clear_db_variables +from tests_common.test_utils.db import clear_db_variables pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/cli/commands/test_webserver_command.py b/tests/cli/commands/test_webserver_command.py index b72f4e4596035..a2cba19c9a216 100644 --- a/tests/cli/commands/test_webserver_command.py +++ b/tests/cli/commands/test_webserver_command.py @@ -30,9 +30,9 @@ from airflow.cli import cli_parser from 
airflow.cli.commands import webserver_command from airflow.cli.commands.webserver_command import GunicornMonitor -from tests.cli.commands._common_cli_classes import _CommonCLIGunicornTestClass +from tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.config import conf_vars +from tests.cli.commands._common_cli_classes import _CommonCLIGunicornTestClass console = Console(width=400, color_system="standard") diff --git a/tests/cli/conftest.py b/tests/cli/conftest.py index 61e27b0e74de4..83aeecdbb1ffb 100644 --- a/tests/cli/conftest.py +++ b/tests/cli/conftest.py @@ -25,8 +25,7 @@ from airflow.models.dagbag import DagBag from airflow.providers.celery.executors import celery_executor, celery_kubernetes_executor from airflow.providers.cncf.kubernetes.executors import kubernetes_executor, local_kubernetes_executor - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars # Create custom executors here because conftest is imported first custom_executor_module = type(sys)("custom_executor") diff --git a/tests/cli/test_cli_parser.py b/tests/cli/test_cli_parser.py index 46ac36803face..f777908c23d19 100644 --- a/tests/cli/test_cli_parser.py +++ b/tests/cli/test_cli_parser.py @@ -43,8 +43,7 @@ from airflow.executors.local_executor import LocalExecutor from airflow.providers.amazon.aws.executors.ecs.ecs_executor import AwsEcsExecutor from airflow.providers.celery.executors.celery_executor import CeleryExecutor - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/conftest.py b/tests/conftest.py index f956865e7a1c6..c5c6441f0b55f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -26,14 +26,14 @@ # unit test mode config is set as early as possible. 
assert "airflow" not in sys.modules, "No airflow module can be imported before these lines" -pytest_plugins = "dev.tests_common.pytest_plugin" +pytest_plugins = "tests_common.pytest_plugin" # Ignore files that are really test dags to be ignored by pytest collect_ignore = [ "tests/dags/subdir1/test_ignore_this.py", "tests/dags/test_invalid_dup_task.py", "tests/dags_corrupted/test_impersonation_custom.py", - "dev.tests_common.test_utils/perf/dags/elastic_dag.py", + "tests_common.test_utils/perf/dags/elastic_dag.py", ] diff --git a/tests/core/test_configuration.py b/tests/core/test_configuration.py index 0690a5f0182dc..1f376c955f503 100644 --- a/tests/core/test_configuration.py +++ b/tests/core/test_configuration.py @@ -42,6 +42,9 @@ write_default_airflow_configuration_if_needed, ) from airflow.providers_manager import ProvidersManager +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.reset_warning_registry import reset_warning_registry + from tests.utils.test_config import ( remove_all_configurations, set_deprecated_options, @@ -49,9 +52,6 @@ use_config, ) -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.reset_warning_registry import reset_warning_registry - HOME_DIR = os.path.expanduser("~") # The conf has been updated with sql_alchemy_con and deactivate_stale_dags_interval to test the diff --git a/tests/core/test_core.py b/tests/core/test_core.py index 13b85a9ef1700..2186d04f195b2 100644 --- a/tests/core/test_core.py +++ b/tests/core/test_core.py @@ -32,8 +32,7 @@ from airflow.providers.standard.operators.bash import BashOperator from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType - -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_task_fail +from tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_task_fail pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/core/test_example_dags_system.py b/tests/core/test_example_dags_system.py index bd34d9bb15919..a0d2a60d85299 100644 --- a/tests/core/test_example_dags_system.py +++ b/tests/core/test_example_dags_system.py @@ -28,9 +28,8 @@ from airflow.utils.module_loading import import_string from airflow.utils.state import DagRunState from airflow.utils.trigger_rule import TriggerRule - -from dev.tests_common.test_utils.system_tests import get_test_run -from dev.tests_common.test_utils.system_tests_class import SystemTest +from tests_common.test_utils.system_tests import get_test_run +from tests_common.test_utils.system_tests_class import SystemTest def fail(): diff --git a/tests/core/test_impersonation_tests.py b/tests/core/test_impersonation_tests.py index 8350e95a8f4fd..0683b619d60f4 100644 --- a/tests/core/test_impersonation_tests.py +++ b/tests/core/test_impersonation_tests.py @@ -32,8 +32,7 @@ from airflow.utils.db import add_default_pool_if_not_exists from airflow.utils.state import State from airflow.utils.timezone import datetime - -from dev.tests_common.test_utils import db +from tests_common.test_utils import db # The entire module into the quarantined mark, this might have unpredictable side effects to other tests # and should be moved into the isolated environment into the future. 
diff --git a/tests/core/test_logging_config.py b/tests/core/test_logging_config.py
index a316130ecdabe..aeb6d36ced04f 100644
--- a/tests/core/test_logging_config.py
+++ b/tests/core/test_logging_config.py
@@ -29,8 +29,7 @@
 import pytest

 from airflow.configuration import conf
-
-from dev.tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.config import conf_vars

 SETTINGS_FILE_VALID = """
 LOGGING_CONFIG = {
diff --git a/tests/core/test_otel_tracer.py b/tests/core/test_otel_tracer.py
index b9612c49dae92..36ebb9f09ce07 100644
--- a/tests/core/test_otel_tracer.py
+++ b/tests/core/test_otel_tracer.py
@@ -26,8 +26,7 @@
 from airflow.traces import TRACEPARENT, TRACESTATE, otel_tracer, utils
 from airflow.traces.tracer import Trace
-
-from dev.tests_common.test_utils.config import env_vars
+from tests_common.test_utils.config import env_vars

 @pytest.fixture
diff --git a/tests/core/test_sentry.py b/tests/core/test_sentry.py
index c67b49980aae2..972e45dbba247 100644
--- a/tests/core/test_sentry.py
+++ b/tests/core/test_sentry.py
@@ -30,8 +30,7 @@
 from airflow.utils import timezone
 from airflow.utils.module_loading import import_string
 from airflow.utils.state import State
-
-from dev.tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.config import conf_vars

 EXECUTION_DATE = timezone.utcnow()
 SCHEDULE_INTERVAL = datetime.timedelta(days=1)
diff --git a/tests/core/test_settings.py b/tests/core/test_settings.py
index 619d64630029d..27b40ad7d1fae 100644
--- a/tests/core/test_settings.py
+++ b/tests/core/test_settings.py
@@ -33,8 +33,7 @@
 from airflow.exceptions import AirflowClusterPolicyViolation, AirflowConfigException
 from airflow.settings import _ENABLE_AIP_44, TracebackSession, is_usage_data_collection_enabled
 from airflow.utils.session import create_session
-
-from dev.tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.config import conf_vars

 SETTINGS_FILE_POLICY = """
 def test_policy(task_instance):
diff --git a/tests/core/test_sqlalchemy_config.py b/tests/core/test_sqlalchemy_config.py
index 5fed8745fd085..99fb90d3ba2e3 100644
--- a/tests/core/test_sqlalchemy_config.py
+++ b/tests/core/test_sqlalchemy_config.py
@@ -25,8 +25,7 @@
 from airflow import settings
 from airflow.api_internal.internal_api_call import InternalApiConfig
 from airflow.exceptions import AirflowConfigException
-
-from dev.tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.config import conf_vars

 SQL_ALCHEMY_CONNECT_ARGS = {"test": 43503, "dict": {"is": 1, "supported": "too"}}
diff --git a/tests/core/test_stats.py b/tests/core/test_stats.py
index 9a218010a189e..e2969204d1d32 100644
--- a/tests/core/test_stats.py
+++ b/tests/core/test_stats.py
@@ -36,8 +36,7 @@
     PatternAllowListValidator,
     PatternBlockListValidator,
 )
-
-from dev.tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.config import conf_vars

 class CustomStatsd(statsd.StatsClient):
diff --git a/tests/dag_processing/test_job_runner.py b/tests/dag_processing/test_job_runner.py
index 81b94dd18b18a..e16ec2d1dddcc 100644
--- a/tests/dag_processing/test_job_runner.py
+++ b/tests/dag_processing/test_job_runner.py
@@ -58,12 +58,9 @@
 from airflow.utils import timezone
 from airflow.utils.net import get_hostname
 from airflow.utils.session import create_session
-from tests.core.test_logging_config import SETTINGS_FILE_VALID, settings_context
-from tests.models import TEST_DAGS_FOLDER
-
-from dev.tests_common.test_utils.compat import ParseImportError
-from dev.tests_common.test_utils.config import conf_vars
-from dev.tests_common.test_utils.db import (
+from tests_common.test_utils.compat import ParseImportError
+from tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.db import (
     clear_db_callbacks,
     clear_db_dags,
     clear_db_import_errors,
@@ -71,6 +68,9 @@
     clear_db_serialized_dags,
 )

+from tests.core.test_logging_config import SETTINGS_FILE_VALID, settings_context
+from tests.models import TEST_DAGS_FOLDER
+
 pytestmark = pytest.mark.db_test

 logger = logging.getLogger(__name__)
diff --git a/tests/dag_processing/test_processor.py b/tests/dag_processing/test_processor.py
index c3a0f5a26e329..40412a131b40b 100644
--- a/tests/dag_processing/test_processor.py
+++ b/tests/dag_processing/test_processor.py
@@ -39,11 +39,10 @@
 from airflow.utils.session import create_session
 from airflow.utils.state import State
 from airflow.utils.types import DagRunType
-
-from dev.tests_common.test_utils.asserts import assert_queries_count
-from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, ParseImportError
-from dev.tests_common.test_utils.config import conf_vars, env_vars
-from dev.tests_common.test_utils.db import (
+from tests_common.test_utils.asserts import assert_queries_count
+from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, ParseImportError
+from tests_common.test_utils.config import conf_vars, env_vars
+from tests_common.test_utils.db import (
     clear_db_dags,
     clear_db_import_errors,
     clear_db_jobs,
@@ -51,7 +50,7 @@
     clear_db_runs,
     clear_db_serialized_dags,
 )
-from dev.tests_common.test_utils.mock_executor import MockExecutor
+from tests_common.test_utils.mock_executor import MockExecutor

 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/dags/test_external_task_sensor_check_existense.py b/tests/dags/test_external_task_sensor_check_existense.py
index 05bd82c509f98..9de992c073b91 100644
--- a/tests/dags/test_external_task_sensor_check_existense.py
+++ b/tests/dags/test_external_task_sensor_check_existense.py
@@ -20,6 +20,7 @@
 from airflow.models.dag import DAG
 from airflow.operators.empty import EmptyOperator
 from airflow.sensors.external_task import ExternalTaskSensor
+
 from tests.models import DEFAULT_DATE

 with DAG(
diff --git a/tests/dags/test_miscellaneous.py b/tests/dags/test_miscellaneous.py
index b08e61e3bbddc..e54ae1b458491 100644
--- a/tests/dags/test_miscellaneous.py
+++ b/tests/dags/test_miscellaneous.py
@@ -23,8 +23,7 @@
 from airflow.models.dag import DAG
 from airflow.operators.empty import EmptyOperator
-
-from dev.tests_common.test_utils.compat import BashOperator
+from tests_common.test_utils.compat import BashOperator

 args = {
     "owner": "airflow",
diff --git a/tests/dags/test_sensor.py b/tests/dags/test_sensor.py
index 7c96160402663..5dc8fbe020901 100644
--- a/tests/dags/test_sensor.py
+++ b/tests/dags/test_sensor.py
@@ -21,8 +21,7 @@
 from airflow.decorators import task
 from airflow.models.dag import DAG
 from airflow.utils import timezone
-
-from dev.tests_common.test_utils.compat import DateTimeSensor
+from tests_common.test_utils.compat import DateTimeSensor

 with DAG(
     dag_id="test_sensor", start_date=datetime.datetime(2022, 1, 1), catchup=False, schedule="@once"
diff --git a/tests/decorators/test_bash.py b/tests/decorators/test_bash.py
index da79fb4cca0fb..dd2ce6223c53c 100644
--- a/tests/decorators/test_bash.py
+++ b/tests/decorators/test_bash.py
@@ -29,8 +29,7 @@
 from airflow.models.renderedtifields import RenderedTaskInstanceFields
 from airflow.utils import timezone
 from airflow.utils.types import NOTSET
-
-from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_rendered_ti_fields
+from tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_rendered_ti_fields

 DEFAULT_DATE = timezone.datetime(2023, 1, 1)
diff --git a/tests/decorators/test_mapped.py b/tests/decorators/test_mapped.py
index 2d3747b5f34ef..9bd59f03be7e1 100644
--- a/tests/decorators/test_mapped.py
+++ b/tests/decorators/test_mapped.py
@@ -22,6 +22,7 @@
 from airflow.decorators import task
 from airflow.models.dag import DAG
 from airflow.utils.task_group import TaskGroup
+
 from tests.models import DEFAULT_DATE
diff --git a/tests/decorators/test_python.py b/tests/decorators/test_python.py
index d34fbbf552b30..4307b29afb48b 100644
--- a/tests/decorators/test_python.py
+++ b/tests/decorators/test_python.py
@@ -39,9 +39,9 @@
 from airflow.utils.trigger_rule import TriggerRule
 from airflow.utils.types import DagRunType
 from airflow.utils.xcom import XCOM_RETURN_KEY
-from tests.operators.test_python import BasePythonTest
+from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS

-from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from tests.operators.test_python import BasePythonTest

 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/executors/test_executor_loader.py b/tests/executors/test_executor_loader.py
index d9bf81dd3116d..d6e783282d0ef 100644
--- a/tests/executors/test_executor_loader.py
+++ b/tests/executors/test_executor_loader.py
@@ -29,8 +29,7 @@
 from airflow.executors.local_executor import LocalExecutor
 from airflow.providers.amazon.aws.executors.ecs.ecs_executor import AwsEcsExecutor
 from airflow.providers.celery.executors.celery_executor import CeleryExecutor
-
-from dev.tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.config import conf_vars

 pytestmark = pytest.mark.skip_if_database_isolation_mode
diff --git a/tests/integration/cli/commands/test_celery_command.py b/tests/integration/cli/commands/test_celery_command.py
index 186addedb1164..bb6c097578a6c 100644
--- a/tests/integration/cli/commands/test_celery_command.py
+++ b/tests/integration/cli/commands/test_celery_command.py
@@ -25,8 +25,7 @@
 from airflow.cli import cli_parser
 from airflow.cli.commands import celery_command
 from airflow.executors import executor_loader
-
-from dev.tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.config import conf_vars

 @pytest.mark.integration("celery")
diff --git a/tests/integration/executors/test_celery_executor.py b/tests/integration/executors/test_celery_executor.py
index 169d3e91356c9..4dd5e2bdc94d9 100644
--- a/tests/integration/executors/test_celery_executor.py
+++ b/tests/integration/executors/test_celery_executor.py
@@ -45,8 +45,7 @@
 from airflow.models.taskinstancekey import TaskInstanceKey
 from airflow.providers.standard.operators.bash import BashOperator
 from airflow.utils.state import State, TaskInstanceState
-
-from dev.tests_common.test_utils import db
+from tests_common.test_utils import db

 logger = logging.getLogger(__name__)
diff --git a/tests/integration/security/test_kerberos.py b/tests/integration/security/test_kerberos.py
index d16b8bc332c7d..7267b69442d78 100644
--- a/tests/integration/security/test_kerberos.py
+++ b/tests/integration/security/test_kerberos.py
@@ -26,8 +26,7 @@
 from airflow.security import kerberos
 from airflow.security.kerberos import renew_from_kt
-
-from dev.tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.config import conf_vars

 @pytest.mark.integration("kerberos")
diff --git a/tests/jobs/test_base_job.py b/tests/jobs/test_base_job.py
index 20079f4f235e5..058539868f1c0 100644
--- a/tests/jobs/test_base_job.py
+++ b/tests/jobs/test_base_job.py
@@ -32,11 +32,11 @@
 from airflow.utils import timezone
 from airflow.utils.session import create_session
 from airflow.utils.state import State
+from tests_common.test_utils.config import conf_vars
+
 from tests.listeners import lifecycle_listener
 from tests.utils.test_helpers import MockJobRunner, SchedulerJobRunner, TriggererJobRunner

-from dev.tests_common.test_utils.config import conf_vars
-
 if TYPE_CHECKING:
     from airflow.serialization.pydantic.job import JobPydantic
diff --git a/tests/jobs/test_local_task_job.py b/tests/jobs/test_local_task_job.py
index 577fddd84d2b7..7d99c593511dd 100644
--- a/tests/jobs/test_local_task_job.py
+++ b/tests/jobs/test_local_task_job.py
@@ -52,12 +52,11 @@
 from airflow.utils.state import State
 from airflow.utils.timeout import timeout
 from airflow.utils.types import DagRunType
-
-from dev.tests_common.test_utils import db
-from dev.tests_common.test_utils.asserts import assert_queries_count
-from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from dev.tests_common.test_utils.config import conf_vars
-from dev.tests_common.test_utils.mock_executor import MockExecutor
+from tests_common.test_utils import db
+from tests_common.test_utils.asserts import assert_queries_count
+from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.mock_executor import MockExecutor

 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py
index 7eae1639d0e1b..f47f4497d5aab 100644
--- a/tests/jobs/test_scheduler_job.py
+++ b/tests/jobs/test_scheduler_job.py
@@ -69,15 +69,10 @@
 from airflow.utils.session import create_session, provide_session
 from airflow.utils.state import DagRunState, JobState, State, TaskInstanceState
 from airflow.utils.types import DagRunType
-from tests.listeners import dag_listener
-from tests.listeners.test_listeners import get_listener_manager
-from tests.models import TEST_DAGS_FOLDER
-from tests.utils.test_timezone import UTC
-
-from dev.tests_common.test_utils.asserts import assert_queries_count
-from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from dev.tests_common.test_utils.config import conf_vars, env_vars
-from dev.tests_common.test_utils.db import (
+from tests_common.test_utils.asserts import assert_queries_count
+from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from tests_common.test_utils.config import conf_vars, env_vars
+from tests_common.test_utils.db import (
     clear_db_assets,
     clear_db_backfills,
     clear_db_dags,
@@ -89,8 +84,13 @@
     clear_db_sla_miss,
     set_default_pool_slots,
 )
-from dev.tests_common.test_utils.mock_executor import MockExecutor
-from dev.tests_common.test_utils.mock_operators import CustomOperator
+from tests_common.test_utils.mock_executor import MockExecutor
+from tests_common.test_utils.mock_operators import CustomOperator
+
+from tests.listeners import dag_listener
+from tests.listeners.test_listeners import get_listener_manager
+from tests.models import TEST_DAGS_FOLDER
+from tests.utils.test_timezone import UTC

 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/jobs/test_triggerer_job.py b/tests/jobs/test_triggerer_job.py
index da8405e197317..3987aa86438fa 100644
--- a/tests/jobs/test_triggerer_job.py
+++ b/tests/jobs/test_triggerer_job.py
@@ -46,9 +46,9 @@
 from airflow.utils.session import create_session
 from airflow.utils.state import State, TaskInstanceState
 from airflow.utils.types import DagRunType
-from tests.core.test_logging_config import reset_logging
+from tests_common.test_utils.db import clear_db_dags, clear_db_runs

-from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs
+from tests.core.test_logging_config import reset_logging

 pytestmark = pytest.mark.db_test
diff --git a/tests/jobs/test_triggerer_job_logging.py b/tests/jobs/test_triggerer_job_logging.py
index 4f39047f050ee..fcca727641512 100644
--- a/tests/jobs/test_triggerer_job_logging.py
+++ b/tests/jobs/test_triggerer_job_logging.py
@@ -30,8 +30,7 @@
 from airflow.utils.log.file_task_handler import FileTaskHandler
 from airflow.utils.log.logging_mixin import RedirectStdHandler
 from airflow.utils.log.trigger_handler import DropTriggerLogsFilter, TriggererHandlerWrapper
-
-from dev.tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.config import conf_vars

 def non_pytest_handlers(val):
diff --git a/tests/lineage/test_hook.py b/tests/lineage/test_hook.py
index 3fbbfaa021ffa..6287b39fc8fa1 100644
--- a/tests/lineage/test_hook.py
+++ b/tests/lineage/test_hook.py
@@ -33,8 +33,7 @@
     NoOpCollector,
     get_hook_lineage_collector,
 )
-
-from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager
+from tests_common.test_utils.mock_plugins import mock_plugin_manager

 class TestHookLineageCollector:
diff --git a/tests/lineage/test_lineage.py b/tests/lineage/test_lineage.py
index ac42b2ee7f814..bd5b2ebe0fc03 100644
--- a/tests/lineage/test_lineage.py
+++ b/tests/lineage/test_lineage.py
@@ -30,8 +30,7 @@
 from airflow.utils import timezone
 from airflow.utils.context import Context
 from airflow.utils.types import DagRunType
-
-from dev.tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.config import conf_vars

 pytestmark = pytest.mark.db_test
diff --git a/tests/listeners/class_listener.py b/tests/listeners/class_listener.py
index 2b9cefa178d7d..5ae71733bf3bd 100644
--- a/tests/listeners/class_listener.py
+++ b/tests/listeners/class_listener.py
@@ -19,8 +19,7 @@
 from airflow.listeners import hookimpl
 from airflow.utils.state import DagRunState, TaskInstanceState
-
-from dev.tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS
+from tests_common.test_utils.compat import AIRFLOW_V_2_10_PLUS

 if AIRFLOW_V_2_10_PLUS:
diff --git a/tests/listeners/test_asset_listener.py b/tests/listeners/test_asset_listener.py
index bb93acd8a0fff..a075b87a7f3d2 100644
--- a/tests/listeners/test_asset_listener.py
+++ b/tests/listeners/test_asset_listener.py
@@ -23,6 +23,7 @@
 from airflow.models.asset import AssetModel
 from airflow.operators.empty import EmptyOperator
 from airflow.utils.session import provide_session
+
 from tests.listeners import asset_listener
diff --git a/tests/listeners/test_dag_import_error_listener.py b/tests/listeners/test_dag_import_error_listener.py
index 886044d27b44e..52a6103dc5bbd 100644
--- a/tests/listeners/test_dag_import_error_listener.py
+++ b/tests/listeners/test_dag_import_error_listener.py
@@ -31,10 +31,8 @@
 from airflow.models import DagModel
 from airflow.models.errors import ParseImportError
 from airflow.utils import timezone
-from tests.listeners import dag_import_error_listener
-
-from dev.tests_common.test_utils.config import conf_vars, env_vars
-from dev.tests_common.test_utils.db import (
+from tests_common.test_utils.config import conf_vars, env_vars
+from tests_common.test_utils.db import (
     clear_db_dags,
     clear_db_import_errors,
     clear_db_jobs,
@@ -43,7 +41,9 @@
     clear_db_serialized_dags,
     clear_db_sla_miss,
 )
-from dev.tests_common.test_utils.mock_executor import MockExecutor
+from tests_common.test_utils.mock_executor import MockExecutor
+
+from tests.listeners import dag_import_error_listener

 pytestmark = pytest.mark.db_test
diff --git a/tests/listeners/test_listeners.py b/tests/listeners/test_listeners.py
index 29ec25a9a8d2a..025e84f1cd696 100644
--- a/tests/listeners/test_listeners.py
+++ b/tests/listeners/test_listeners.py
@@ -29,6 +29,7 @@
 from airflow.utils import timezone
 from airflow.utils.session import provide_session
 from airflow.utils.state import DagRunState, TaskInstanceState
+
 from tests.listeners import (
     class_listener,
     full_listener,
diff --git a/tests/models/test_backfill.py b/tests/models/test_backfill.py
index 06c41cadd8cc3..fafcca478c397 100644
--- a/tests/models/test_backfill.py
+++ b/tests/models/test_backfill.py
@@ -33,8 +33,7 @@
 )
 from airflow.operators.python import PythonOperator
 from airflow.utils.state import DagRunState
-
-from dev.tests_common.test_utils.db import (
+from tests_common.test_utils.db import (
     clear_db_backfills,
     clear_db_dags,
     clear_db_runs,
diff --git a/tests/models/test_base.py b/tests/models/test_base.py
index 27eeba2912f52..2a58dc05009f3 100644
--- a/tests/models/test_base.py
+++ b/tests/models/test_base.py
@@ -19,8 +19,7 @@
 import pytest

 from airflow.models.base import get_id_collation_args
-
-from dev.tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.config import conf_vars

 pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode]
diff --git a/tests/models/test_baseoperator.py b/tests/models/test_baseoperator.py
index 999529e14a997..b9edc2b56f5a4 100644
--- a/tests/models/test_baseoperator.py
+++ b/tests/models/test_baseoperator.py
@@ -49,9 +49,9 @@
 from airflow.utils.template import literal
 from airflow.utils.trigger_rule import TriggerRule
 from airflow.utils.types import DagRunType
-from tests.models import DEFAULT_DATE
+from tests_common.test_utils.mock_operators import DeprecatedOperator, MockOperator

-from dev.tests_common.test_utils.mock_operators import DeprecatedOperator, MockOperator
+from tests.models import DEFAULT_DATE

 if TYPE_CHECKING:
     from airflow.utils.context import Context
diff --git a/tests/models/test_cleartasks.py b/tests/models/test_cleartasks.py
index 810453053dd0c..a58e308b01861 100644
--- a/tests/models/test_cleartasks.py
+++ b/tests/models/test_cleartasks.py
@@ -34,10 +34,10 @@
 from airflow.utils.session import create_session
 from airflow.utils.state import DagRunState, State, TaskInstanceState
 from airflow.utils.types import DagRunType
-from tests.models import DEFAULT_DATE
+from tests_common.test_utils import db
+from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS

-from dev.tests_common.test_utils import db
-from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from tests.models import DEFAULT_DATE

 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/models/test_dag.py b/tests/models/test_dag.py
index 67dc699fc3c8b..0fe22ccf77fe3 100644
--- a/tests/models/test_dag.py
+++ b/tests/models/test_dag.py
@@ -90,6 +90,19 @@
 from airflow.utils.trigger_rule import TriggerRule
 from airflow.utils.types import DagRunType
 from airflow.utils.weight_rule import WeightRule
+from tests_common.test_utils.asserts import assert_queries_count
+from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.db import (
+    clear_db_assets,
+    clear_db_dags,
+    clear_db_runs,
+    clear_db_serialized_dags,
+)
+from tests_common.test_utils.mapping import expand_mapped_task
+from tests_common.test_utils.mock_plugins import mock_plugin_manager
+from tests_common.test_utils.timetables import cron_timetable, delta_timetable
+
 from tests.models import DEFAULT_DATE
 from tests.plugins.priority_weight_strategy import (
     FactorPriorityWeightStrategy,
@@ -98,19 +111,6 @@
     TestPriorityWeightStrategyPlugin,
 )

-from dev.tests_common.test_utils.asserts import assert_queries_count
-from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from dev.tests_common.test_utils.config import conf_vars
-from dev.tests_common.test_utils.db import (
-    clear_db_assets,
-    clear_db_dags,
-    clear_db_runs,
-    clear_db_serialized_dags,
-)
-from dev.tests_common.test_utils.mapping import expand_mapped_task
-from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager
-from dev.tests_common.test_utils.timetables import cron_timetable, delta_timetable
-
 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/models/test_dagbag.py b/tests/models/test_dagbag.py
index 0b477cc24c636..0973da449ef38 100644
--- a/tests/models/test_dagbag.py
+++ b/tests/models/test_dagbag.py
@@ -44,13 +44,13 @@
 from airflow.utils.dates import timezone as tz
 from airflow.utils.session import create_session
 from airflow.www.security_appless import ApplessAirflowSecurityManager
+from tests_common.test_utils import db
+from tests_common.test_utils.asserts import assert_queries_count
+from tests_common.test_utils.config import conf_vars
+
 from tests import cluster_policies
 from tests.models import TEST_DAGS_FOLDER

-from dev.tests_common.test_utils import db
-from dev.tests_common.test_utils.asserts import assert_queries_count
-from dev.tests_common.test_utils.config import conf_vars
-
 pytestmark = pytest.mark.db_test

 example_dags_folder = pathlib.Path(airflow.example_dags.__path__[0])  # type: ignore[attr-defined]
diff --git a/tests/models/test_dagcode.py b/tests/models/test_dagcode.py
index 0bc105cfaf42c..f0cec513c09d3 100644
--- a/tests/models/test_dagcode.py
+++ b/tests/models/test_dagcode.py
@@ -30,8 +30,7 @@
 # To move it to a shared module.
 from airflow.utils.file import open_maybe_zipped
 from airflow.utils.session import create_session
-
-from dev.tests_common.test_utils.db import clear_db_dag_code
+from tests_common.test_utils.db import clear_db_dag_code

 pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode]
diff --git a/tests/models/test_dagrun.py b/tests/models/test_dagrun.py
index 46773232f77fe..587d514cf807e 100644
--- a/tests/models/test_dagrun.py
+++ b/tests/models/test_dagrun.py
@@ -46,12 +46,12 @@
 from airflow.utils.state import DagRunState, State, TaskInstanceState
 from airflow.utils.trigger_rule import TriggerRule
 from airflow.utils.types import DagRunType
-from tests.models import DEFAULT_DATE as _DEFAULT_DATE
+from tests_common.test_utils import db
+from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.mock_operators import MockOperator

-from dev.tests_common.test_utils import db
-from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from dev.tests_common.test_utils.config import conf_vars
-from dev.tests_common.test_utils.mock_operators import MockOperator
+from tests.models import DEFAULT_DATE as _DEFAULT_DATE

 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/models/test_dagwarning.py b/tests/models/test_dagwarning.py
index 9b98e0c31d74d..95d12c005e4bb 100644
--- a/tests/models/test_dagwarning.py
+++ b/tests/models/test_dagwarning.py
@@ -25,8 +25,7 @@
 from airflow.models import DagModel
 from airflow.models.dagwarning import DagWarning
-
-from dev.tests_common.test_utils.db import clear_db_dags
+from tests_common.test_utils.db import clear_db_dags

 pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode]
diff --git a/tests/models/test_mappedoperator.py b/tests/models/test_mappedoperator.py
index 927dbfd73a563..7180421a90bd6 100644
--- a/tests/models/test_mappedoperator.py
+++ b/tests/models/test_mappedoperator.py
@@ -41,15 +41,15 @@
 from airflow.utils.task_instance_session import set_current_task_instance_session
 from airflow.utils.trigger_rule import TriggerRule
 from airflow.utils.xcom import XCOM_RETURN_KEY
-from tests.models import DEFAULT_DATE
-
-from dev.tests_common.test_utils.mapping import expand_mapped_task
-from dev.tests_common.test_utils.mock_operators import (
+from tests_common.test_utils.mapping import expand_mapped_task
+from tests_common.test_utils.mock_operators import (
     MockOperator,
     MockOperatorWithNestedFields,
     NestedFields,
 )

+from tests.models import DEFAULT_DATE
+
 pytestmark = pytest.mark.db_test

 if TYPE_CHECKING:
diff --git a/tests/models/test_param.py b/tests/models/test_param.py
index 2d324478deefe..7ade5bd21cebf 100644
--- a/tests/models/test_param.py
+++ b/tests/models/test_param.py
@@ -26,8 +26,7 @@
 from airflow.serialization.serialized_objects import BaseSerialization
 from airflow.utils import timezone
 from airflow.utils.types import DagRunType
-
-from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_xcom
+from tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_xcom

 class TestParam:
diff --git a/tests/models/test_pool.py b/tests/models/test_pool.py
index 22852261671ce..698899be002e4 100644
--- a/tests/models/test_pool.py
+++ b/tests/models/test_pool.py
@@ -27,8 +27,7 @@
 from airflow.utils import timezone
 from airflow.utils.session import create_session
 from airflow.utils.state import State
-
-from dev.tests_common.test_utils.db import (
+from tests_common.test_utils.db import (
     clear_db_dags,
     clear_db_pools,
     clear_db_runs,
diff --git a/tests/models/test_renderedtifields.py b/tests/models/test_renderedtifields.py
index 6ff87b28a89b6..6d160164ecff8 100644
--- a/tests/models/test_renderedtifields.py
+++ b/tests/models/test_renderedtifields.py
@@ -37,9 +37,8 @@
 from airflow.providers.standard.operators.bash import BashOperator
 from airflow.utils.task_instance_session import set_current_task_instance_session
 from airflow.utils.timezone import datetime
-
-from dev.tests_common.test_utils.asserts import assert_queries_count
-from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_rendered_ti_fields
+from tests_common.test_utils.asserts import assert_queries_count
+from tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_rendered_ti_fields

 pytestmark = pytest.mark.db_test
diff --git a/tests/models/test_serialized_dag.py b/tests/models/test_serialized_dag.py
index 5e6714feda373..2ed2d5090a2d6 100644
--- a/tests/models/test_serialized_dag.py
+++ b/tests/models/test_serialized_dag.py
@@ -36,9 +36,8 @@
 from airflow.settings import json
 from airflow.utils.hashlib_wrapper import md5
 from airflow.utils.session import create_session
-
-from dev.tests_common.test_utils import db
-from dev.tests_common.test_utils.asserts import assert_queries_count
+from tests_common.test_utils import db
+from tests_common.test_utils.asserts import assert_queries_count

 pytestmark = pytest.mark.db_test
diff --git a/tests/models/test_skipmixin.py b/tests/models/test_skipmixin.py
index 0f2406c5737c5..e15f5521d1cd0 100644
--- a/tests/models/test_skipmixin.py
+++ b/tests/models/test_skipmixin.py
@@ -31,8 +31,7 @@
 from airflow.utils import timezone
 from airflow.utils.state import State
 from airflow.utils.types import DagRunType
-
-from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs
+from tests_common.test_utils.db import clear_db_dags, clear_db_runs

 pytestmark = pytest.mark.db_test
diff --git a/tests/models/test_taskinstance.py b/tests/models/test_taskinstance.py
index 03566dca30bc9..5753f51fcd2f8 100644
--- a/tests/models/test_taskinstance.py
+++ b/tests/models/test_taskinstance.py
@@ -98,13 +98,13 @@
 from airflow.utils.task_instance_session import set_current_task_instance_session
 from airflow.utils.types import DagRunType
 from airflow.utils.xcom import XCOM_RETURN_KEY
-from tests.models import DEFAULT_DATE, TEST_DAGS_FOLDER
+from tests_common.test_utils import db
+from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.db import clear_db_connections, clear_db_runs
+from tests_common.test_utils.mock_operators import MockOperator

-from dev.tests_common.test_utils import db
-from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from dev.tests_common.test_utils.config import conf_vars
-from dev.tests_common.test_utils.db import clear_db_connections, clear_db_runs
-from dev.tests_common.test_utils.mock_operators import MockOperator
+from tests.models import DEFAULT_DATE, TEST_DAGS_FOLDER

 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/models/test_timestamp.py b/tests/models/test_timestamp.py
index 4888c2fdac268..6bf9b9b768a60 100644
--- a/tests/models/test_timestamp.py
+++ b/tests/models/test_timestamp.py
@@ -25,8 +25,7 @@
 from airflow.utils import timezone
 from airflow.utils.session import provide_session
 from airflow.utils.state import State
-
-from dev.tests_common.test_utils.db import clear_db_dags, clear_db_logs, clear_db_runs
+from tests_common.test_utils.db import clear_db_dags, clear_db_logs, clear_db_runs

 pytestmark = pytest.mark.db_test
diff --git a/tests/models/test_trigger.py b/tests/models/test_trigger.py
index b44e4b9f6e881..8a9c21c685bbb 100644
--- a/tests/models/test_trigger.py
+++ b/tests/models/test_trigger.py
@@ -42,8 +42,7 @@
 from airflow.utils.session import create_session
 from airflow.utils.state import State
 from airflow.utils.xcom import XCOM_RETURN_KEY
-
-from dev.tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.config import conf_vars

 pytestmark = pytest.mark.db_test
diff --git a/tests/models/test_variable.py b/tests/models/test_variable.py
index dfe448092c842..29a5c94d9ae0d 100644
--- a/tests/models/test_variable.py
+++ b/tests/models/test_variable.py
@@ -27,9 +27,8 @@
 from airflow.models import Variable, crypto, variable
 from airflow.secrets.cache import SecretCache
 from airflow.secrets.metastore import MetastoreBackend
-
-from dev.tests_common.test_utils import db
-from dev.tests_common.test_utils.config import conf_vars
+from tests_common.test_utils import db
+from tests_common.test_utils.config import conf_vars

 pytestmark = pytest.mark.db_test
diff --git a/tests/models/test_xcom.py b/tests/models/test_xcom.py
index 17ea5fa4ff114..ee47ac79fab1e 100644
--- a/tests/models/test_xcom.py
+++ b/tests/models/test_xcom.py
@@ -35,9 +35,8 @@
 from airflow.utils import timezone
 from airflow.utils.session import create_session
 from airflow.utils.xcom import XCOM_RETURN_KEY
-
-from dev.tests_common.test_utils.config import conf_vars
-from dev.tests_common.test_utils.db import is_db_isolation_mode
+from tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.db import is_db_isolation_mode

 pytestmark = pytest.mark.db_test
diff --git a/tests/models/test_xcom_arg.py b/tests/models/test_xcom_arg.py
index fbdd500661d22..11613bcfec6b9 100644
--- a/tests/models/test_xcom_arg.py
+++ b/tests/models/test_xcom_arg.py
@@ -22,8 +22,7 @@
 from airflow.operators.python import PythonOperator
 from airflow.providers.standard.operators.bash import BashOperator
 from airflow.utils.types import NOTSET
-
-from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs
+from tests_common.test_utils.db import clear_db_dags, clear_db_runs

 pytestmark = pytest.mark.db_test
diff --git a/tests/operators/test_branch_operator.py b/tests/operators/test_branch_operator.py
index 8943bf580e50a..3442ff0a7e5e1 100644
--- a/tests/operators/test_branch_operator.py
+++ b/tests/operators/test_branch_operator.py
@@ -28,8 +28,7 @@
 from airflow.utils.state import State
 from airflow.utils.task_group import TaskGroup
 from airflow.utils.types import DagRunType
-
-from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS

 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/operators/test_email.py b/tests/operators/test_email.py
index c86e0f94f6145..8ef2634aad4d3 100644
--- a/tests/operators/test_email.py
+++ b/tests/operators/test_email.py
@@ -24,8 +24,7 @@
 from airflow.operators.email import EmailOperator
 from airflow.utils import timezone
-
-from dev.tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.config import conf_vars

 pytestmark = pytest.mark.db_test
diff --git a/tests/operators/test_latest_only_operator.py b/tests/operators/test_latest_only_operator.py
index 78fabc4ca922f..57bc2d27c1a3f 100644
--- a/tests/operators/test_latest_only_operator.py
+++ b/tests/operators/test_latest_only_operator.py
@@ -30,9 +30,8 @@
 from airflow.utils.state import State
 from airflow.utils.trigger_rule import TriggerRule
 from airflow.utils.types import DagRunType
-
-from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from dev.tests_common.test_utils.db import clear_db_runs, clear_db_xcom
+from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from tests_common.test_utils.db import clear_db_runs, clear_db_xcom

 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/operators/test_python.py b/tests/operators/test_python.py
index 3dd8f506ee986..5c91362723c1e 100644
--- a/tests/operators/test_python.py
+++ b/tests/operators/test_python.py
@@ -68,10 +68,9 @@
 from airflow.utils.state import DagRunState, State, TaskInstanceState
 from airflow.utils.trigger_rule import TriggerRule
 from airflow.utils.types import NOTSET, DagRunType
-
-from dev.tests_common.test_utils import AIRFLOW_MAIN_FOLDER
-from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from dev.tests_common.test_utils.db import clear_db_runs
+from tests_common.test_utils import AIRFLOW_MAIN_FOLDER
+from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from tests_common.test_utils.db import clear_db_runs

 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/plugins/test_plugin.py b/tests/plugins/test_plugin.py
index fd474766d41ea..8751d75cd20fd 100644
--- a/tests/plugins/test_plugin.py
+++ b/tests/plugins/test_plugin.py
@@ -33,10 +33,7 @@
 from airflow.task.priority_strategy import PriorityWeightStrategy
 from airflow.ti_deps.deps.base_ti_dep import BaseTIDep
 from airflow.timetables.interval import CronDataIntervalTimetable
-from tests.listeners import empty_listener
-from tests.listeners.class_listener import ClassBasedListener
-
-from dev.tests_common.test_utils.mock_operators import (
+from tests_common.test_utils.mock_operators import (
     AirflowLink,
     AirflowLink2,
     CustomBaseIndexOpLink,
@@ -45,6 +42,9 @@
     GoogleLink,
 )

+from tests.listeners import empty_listener
+from tests.listeners.class_listener import ClassBasedListener
+
 # Will show up under airflow.hooks.test_plugin.PluginHook
 class PluginHook(BaseHook):
diff --git a/tests/plugins/test_plugins_manager.py b/tests/plugins/test_plugins_manager.py
index 1c54c3ebb195c..0c742dd70c67d 100644
--- a/tests/plugins/test_plugins_manager.py
+++ b/tests/plugins/test_plugins_manager.py
@@ -33,9 +33,8 @@
 from airflow.plugins_manager import AirflowPlugin
 from airflow.utils.module_loading import qualname
 from airflow.www import app as application
-
-from dev.tests_common.test_utils.config import conf_vars
-from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager
+from tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.mock_plugins import mock_plugin_manager

 pytestmark = pytest.mark.db_test
diff --git a/tests/secrets/test_cache.py b/tests/secrets/test_cache.py
index 40ab4aa290df5..86ed57688f34d 100644
--- a/tests/secrets/test_cache.py
+++ b/tests/secrets/test_cache.py
@@ -22,8 +22,7 @@
 import pytest

 from airflow.secrets.cache import SecretCache
-
-from dev.tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.config import conf_vars

 def test_cache_disabled_by_default():
diff --git a/tests/security/test_kerberos.py b/tests/security/test_kerberos.py
index 2b661fe9a1478..9f15c0296894c 100644
--- a/tests/security/test_kerberos.py
+++ b/tests/security/test_kerberos.py
@@ -25,8 +25,7 @@
 from airflow.security import kerberos
 from airflow.security.kerberos import get_kerberos_principle, renew_from_kt
-
-from dev.tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.config import conf_vars

 pytestmark = pytest.mark.db_test
diff --git a/tests/sensors/test_base.py b/tests/sensors/test_base.py
index 8a3b2b5c5c0fa..f89cee879f9b8 100644
--- a/tests/sensors/test_base.py
+++ b/tests/sensors/test_base.py
@@ -57,8 +57,7 @@
 from airflow.utils.session import create_session
 from airflow.utils.state import State
 from airflow.utils.timezone import datetime
-
-from dev.tests_common.test_utils import db
+from tests_common.test_utils import db

 pytestmark = pytest.mark.db_test
diff --git a/tests/sensors/test_external_task_sensor.py b/tests/sensors/test_external_task_sensor.py
index f1739e6b84418..2f17bfd38d133 100644
--- a/tests/sensors/test_external_task_sensor.py
+++ b/tests/sensors/test_external_task_sensor.py
@@ -51,11 +51,11 @@
 from airflow.utils.task_group import TaskGroup
 from airflow.utils.timezone import datetime
 from airflow.utils.types import DagRunType
-from tests.models import TEST_DAGS_FOLDER
+from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
+from tests_common.test_utils.db import clear_db_runs
+from tests_common.test_utils.mock_operators import MockOperator

-from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS
-from dev.tests_common.test_utils.db import clear_db_runs
-from dev.tests_common.test_utils.mock_operators import MockOperator
+from tests.models import TEST_DAGS_FOLDER

 if AIRFLOW_V_3_0_PLUS:
     from airflow.utils.types import DagRunTriggeredByType
diff --git a/tests/sensors/test_python.py b/tests/sensors/test_python.py
index 60814620b3b99..0a898b339108c 100644
--- a/tests/sensors/test_python.py
+++ b/tests/sensors/test_python.py
@@ -25,6 +25,7 @@
 from airflow.exceptions import AirflowSensorTimeout
 from airflow.sensors.base import PokeReturnValue
 from airflow.sensors.python import PythonSensor
+
 from tests.operators.test_python import BasePythonTest

 pytestmark = pytest.mark.db_test
diff --git a/tests/serialization/test_dag_serialization.py b/tests/serialization/test_dag_serialization.py
index 35f5e196a2222..da6723800d54a 100644
--- a/tests/serialization/test_dag_serialization.py
+++ b/tests/serialization/test_dag_serialization.py
@@ -80,10 +80,9 @@
 from airflow.utils.operator_resources import Resources
 from airflow.utils.task_group import TaskGroup
 from airflow.utils.xcom import XCOM_RETURN_KEY
-
-from dev.tests_common.test_utils.compat import BaseOperatorLink
-from dev.tests_common.test_utils.mock_operators import AirflowLink2, CustomOperator, GoogleLink, MockOperator
-from dev.tests_common.test_utils.timetables import (
+from tests_common.test_utils.compat import BaseOperatorLink
+from tests_common.test_utils.mock_operators import AirflowLink2, CustomOperator, GoogleLink, MockOperator
+from tests_common.test_utils.timetables import (
     CustomSerializationTimetable,
     cron_timetable,
     delta_timetable,
@@ -190,9 +189,7 @@
     "max_retry_delay": 600.0,
     "downstream_task_ids": [],
     "_is_empty": False,
-    "_operator_extra_links": [
-        {"dev.tests_common.test_utils.mock_operators.CustomOpLink": {}}
-    ],
+    "_operator_extra_links": [{"tests_common.test_utils.mock_operators.CustomOpLink": {}}],
     "ui_color": "#fff",
     "ui_fgcolor": "#000",
     "template_ext": [],
@@ -200,7 +197,7 @@
     "template_fields_renderers": {},
     "_task_type": "CustomOperator",
"_operator_name": "@custom", - "_task_module": "dev.tests_common.test_utils.mock_operators", + "_task_module": "tests_common.test_utils.mock_operators", "pool": "default_pool", "is_setup": False, "is_teardown": False, @@ -240,7 +237,7 @@ ) CUSTOM_TIMETABLE_SERIALIZED = { - "__type": "dev.tests_common.test_utils.timetables.CustomSerializationTimetable", + "__type": "tests_common.test_utils.timetables.CustomSerializationTimetable", "__var": {"value": "foo"}, } @@ -373,7 +370,7 @@ def timetable_plugin(monkeypatch): monkeypatch.setattr( plugins_manager, "timetable_classes", - {"dev.tests_common.test_utils.timetables.CustomSerializationTimetable": CustomSerializationTimetable}, + {"tests_common.test_utils.timetables.CustomSerializationTimetable": CustomSerializationTimetable}, ) @@ -469,7 +466,7 @@ def test_dag_serialization_unregistered_custom_timetable(self): message = ( "Failed to serialize DAG 'simple_dag': Timetable class " - "'dev.tests_common.test_utils.timetables.CustomSerializationTimetable' " + "'tests_common.test_utils.timetables.CustomSerializationTimetable' " "is not registered or " "you have a top level database access that disrupted the session. " "Please check the airflow best practices documentation." @@ -834,7 +831,7 @@ def test_deserialization_timetable_unregistered(self): SerializedDAG.from_dict(serialized) message = ( "Timetable class " - "'dev.tests_common.test_utils.timetables.CustomSerializationTimetable' " + "'tests_common.test_utils.timetables.CustomSerializationTimetable' " "is not registered or " "you have a top level database access that disrupted the session. " "Please check the airflow best practices documentation." @@ -986,15 +983,15 @@ def test_task_params_roundtrip(self, val, expected_val): [ pytest.param( "true", - [{"dev.tests_common.test_utils.mock_operators.CustomOpLink": {}}], + [{"tests_common.test_utils.mock_operators.CustomOpLink": {}}], {"Google Custom": "http://google.com/custom_base_link?search=true"}, id="non-indexed-link", ), pytest.param( ["echo", "true"], [ - {"dev.tests_common.test_utils.mock_operators.CustomBaseIndexOpLink": {"index": 0}}, - {"dev.tests_common.test_utils.mock_operators.CustomBaseIndexOpLink": {"index": 1}}, + {"tests_common.test_utils.mock_operators.CustomBaseIndexOpLink": {"index": 0}}, + {"tests_common.test_utils.mock_operators.CustomBaseIndexOpLink": {"index": 1}}, ], { "BigQuery Console #1": "https://console.cloud.google.com/bigquery?j=echo", @@ -1297,7 +1294,7 @@ def test_operator_deserialize_old_names(self): "template_fields": ["bash_command"], "template_fields_renderers": {}, "_task_type": "CustomOperator", - "_task_module": "dev.tests_common.test_utils.mock_operators", + "_task_module": "tests_common.test_utils.mock_operators", "pool": "default_pool", "ui_color": "#fff", "ui_fgcolor": "#000", @@ -2354,7 +2351,7 @@ def test_operator_expand_xcomarg_serde(): "_is_empty": False, "_is_mapped": True, "_needs_expansion": True, - "_task_module": "dev.tests_common.test_utils.mock_operators", + "_task_module": "tests_common.test_utils.mock_operators", "_task_type": "MockOperator", "downstream_task_ids": [], "expand_input": { @@ -2409,7 +2406,7 @@ def test_operator_expand_kwargs_literal_serde(strict): "_is_empty": False, "_is_mapped": True, "_needs_expansion": True, - "_task_module": "dev.tests_common.test_utils.mock_operators", + "_task_module": "tests_common.test_utils.mock_operators", "_task_type": "MockOperator", "downstream_task_ids": [], "expand_input": { @@ -2464,7 +2461,7 @@ def 
test_operator_expand_kwargs_xcomarg_serde(strict): "_is_empty": False, "_is_mapped": True, "_needs_expansion": True, - "_task_module": "dev.tests_common.test_utils.mock_operators", + "_task_module": "tests_common.test_utils.mock_operators", "_task_type": "MockOperator", "downstream_task_ids": [], "expand_input": { @@ -2827,7 +2824,7 @@ def operator_extra_links(self): "_disallow_kwargs_override": False, "_expand_input_attr": "expand_input", "downstream_task_ids": [], - "_operator_extra_links": [{"dev.tests_common.test_utils.mock_operators.AirflowLink2": {}}], + "_operator_extra_links": [{"tests_common.test_utils.mock_operators.AirflowLink2": {}}], "ui_color": "#fff", "ui_fgcolor": "#000", "template_ext": [], diff --git a/tests/serialization/test_pydantic_models.py b/tests/serialization/test_pydantic_models.py index 423dc41a38068..6ecf9c8d5d90d 100644 --- a/tests/serialization/test_pydantic_models.py +++ b/tests/serialization/test_pydantic_models.py @@ -44,9 +44,9 @@ from airflow.utils import timezone from airflow.utils.state import State from airflow.utils.types import AttributeRemoved, DagRunType -from tests.models import DEFAULT_DATE +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests.models import DEFAULT_DATE if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/serialization/test_serde.py b/tests/serialization/test_serde.py index 5c915839c6422..829195476c3d6 100644 --- a/tests/serialization/test_serde.py +++ b/tests/serialization/test_serde.py @@ -43,8 +43,7 @@ serialize, ) from airflow.utils.module_loading import import_string, iter_namespace, qualname - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars @pytest.fixture diff --git a/tests/serialization/test_serialized_objects.py b/tests/serialization/test_serialized_objects.py index 67fdceeaf08d1..5d35278d89bfc 100644 --- a/tests/serialization/test_serialized_objects.py +++ b/tests/serialization/test_serialized_objects.py @@ -67,6 +67,7 @@ from airflow.utils.state import DagRunState, State from airflow.utils.task_group import TaskGroup from airflow.utils.types import DagRunType + from tests import REPO_ROOT @@ -471,8 +472,7 @@ def test_all_pydantic_models_round_trip(): @pytest.mark.db_test def test_serialized_mapped_operator_unmap(dag_maker): from airflow.serialization.serialized_objects import SerializedDAG - - from dev.tests_common.test_utils.mock_operators import MockOperator + from tests_common.test_utils.mock_operators import MockOperator with dag_maker(dag_id="dag") as dag: MockOperator(task_id="task1", arg1="x") diff --git a/tests/system/core/example_external_task_child_deferrable.py b/tests/system/core/example_external_task_child_deferrable.py index 4e8eca3b15eb2..1da360b57b56d 100644 --- a/tests/system/core/example_external_task_child_deferrable.py +++ b/tests/system/core/example_external_task_child_deferrable.py @@ -34,7 +34,7 @@ ) -from dev.tests_common.test_utils.system_tests import get_test_run +from tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/core/example_external_task_parent_deferrable.py b/tests/system/core/example_external_task_parent_deferrable.py index 1a64f7e98ae8a..62bb1afc18b2e 100644 --- a/tests/system/core/example_external_task_parent_deferrable.py +++ 
b/tests/system/core/example_external_task_parent_deferrable.py @@ -57,14 +57,14 @@ start >> [trigger_child_task, external_task_sensor] >> end - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "teardown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run +from tests_common.test_utils.system_tests import get_test_run # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/system/example_empty.py b/tests/system/example_empty.py index de316a11eee77..d2d362012c633 100644 --- a/tests/system/example_empty.py +++ b/tests/system/example_empty.py @@ -35,14 +35,14 @@ chain(task) - from dev.tests_common.test_utils.watcher import watcher + from tests_common.test_utils.watcher import watcher # This test needs watcher in order to properly mark success/failure # when "tearDown" task with trigger rule is part of the DAG list(dag.tasks) >> watcher() -from dev.tests_common.test_utils.system_tests import get_test_run # noqa: E402 +from tests_common.test_utils.system_tests import get_test_run # noqa: E402 # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) test_run = get_test_run(dag) diff --git a/tests/task/task_runner/test_standard_task_runner.py b/tests/task/task_runner/test_standard_task_runner.py index 55e3d34192d60..7e1c80c8f8d6f 100644 --- a/tests/task/task_runner/test_standard_task_runner.py +++ b/tests/task/task_runner/test_standard_task_runner.py @@ -40,12 +40,12 @@ from airflow.utils.platform import getuser from airflow.utils.state import State from airflow.utils.timeout import timeout +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.db import clear_db_runs + from tests.listeners import xcom_listener from tests.listeners.file_write_listener import FileWriteListener -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.db import clear_db_runs - if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/ti_deps/deps/test_pool_slots_available_dep.py b/tests/ti_deps/deps/test_pool_slots_available_dep.py index d7fc33aa41bf8..1cf2d1d2072fe 100644 --- a/tests/ti_deps/deps/test_pool_slots_available_dep.py +++ b/tests/ti_deps/deps/test_pool_slots_available_dep.py @@ -26,8 +26,7 @@ from airflow.ti_deps.deps.pool_slots_available_dep import PoolSlotsAvailableDep from airflow.utils.session import create_session from airflow.utils.state import TaskInstanceState - -from dev.tests_common.test_utils import db +from tests_common.test_utils import db pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/ti_deps/deps/test_prev_dagrun_dep.py b/tests/ti_deps/deps/test_prev_dagrun_dep.py index 638f4b69d7e92..bb7ea047ac651 100644 --- a/tests/ti_deps/deps/test_prev_dagrun_dep.py +++ b/tests/ti_deps/deps/test_prev_dagrun_dep.py @@ -29,9 +29,8 @@ from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.timezone import convert_to_utc, datetime from airflow.utils.types import DagRunType - -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.db import clear_db_runs +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from 
tests_common.test_utils.db import clear_db_runs if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/ti_deps/deps/test_ready_to_reschedule_dep.py b/tests/ti_deps/deps/test_ready_to_reschedule_dep.py index d137d43bbf38d..02b291797956f 100644 --- a/tests/ti_deps/deps/test_ready_to_reschedule_dep.py +++ b/tests/ti_deps/deps/test_ready_to_reschedule_dep.py @@ -30,8 +30,7 @@ from airflow.utils import timezone from airflow.utils.session import create_session from airflow.utils.state import State - -from dev.tests_common.test_utils import db +from tests_common.test_utils import db pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/utils/log/test_log_reader.py b/tests/utils/log/test_log_reader.py index 53e702efd8a8e..95d8471a3a35b 100644 --- a/tests/utils/log/test_log_reader.py +++ b/tests/utils/log/test_log_reader.py @@ -38,9 +38,8 @@ from airflow.utils.log.logging_mixin import ExternalLoggingMixin from airflow.utils.state import TaskInstanceState from airflow.utils.types import DagRunType - -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_dags, clear_db_runs pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/utils/log/test_secrets_masker.py b/tests/utils/log/test_secrets_masker.py index f3fed73675cce..ffe37fbfc4955 100644 --- a/tests/utils/log/test_secrets_masker.py +++ b/tests/utils/log/test_secrets_masker.py @@ -38,8 +38,7 @@ should_hide_value_for_key, ) from airflow.utils.state import DagRunState, JobState, State, TaskInstanceState - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.enable_redact p = "password" diff --git a/tests/utils/log/test_task_context_logger.py b/tests/utils/log/test_task_context_logger.py index 3a08947ede091..5d326a240f45e 100644 --- a/tests/utils/log/test_task_context_logger.py +++ b/tests/utils/log/test_task_context_logger.py @@ -24,9 +24,8 @@ from airflow.models.taskinstancekey import TaskInstanceKey from airflow.utils.log.task_context_logger import TaskContextLogger - -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.config import conf_vars if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/utils/test_dag_cycle.py b/tests/utils/test_dag_cycle.py index 1cf607fd8ee50..55937f1382e38 100644 --- a/tests/utils/test_dag_cycle.py +++ b/tests/utils/test_dag_cycle.py @@ -24,6 +24,7 @@ from airflow.utils.dag_cycle_tester import check_cycle from airflow.utils.edgemodifier import Label from airflow.utils.task_group import TaskGroup + from tests.models import DEFAULT_DATE diff --git a/tests/utils/test_db.py b/tests/utils/test_db.py index 0f3a56a5d1c8d..dcfcd16421527 100644 --- a/tests/utils/test_db.py +++ b/tests/utils/test_db.py @@ -50,8 +50,7 @@ upgradedb, ) from airflow.utils.db_manager import RunDBManager - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/utils/test_db_cleanup.py 
b/tests/utils/test_db_cleanup.py index 3b5a3b2227231..c7533827d805d 100644 --- a/tests/utils/test_db_cleanup.py +++ b/tests/utils/test_db_cleanup.py @@ -48,8 +48,7 @@ run_cleanup, ) from airflow.utils.session import create_session - -from dev.tests_common.test_utils.db import ( +from tests_common.test_utils.db import ( clear_db_assets, clear_db_dags, clear_db_runs, diff --git a/tests/utils/test_db_manager.py b/tests/utils/test_db_manager.py index 975ea65499aae..001d89e1d4c59 100644 --- a/tests/utils/test_db_manager.py +++ b/tests/utils/test_db_manager.py @@ -25,8 +25,7 @@ from airflow.models import Base from airflow.utils.db import downgrade, initdb from airflow.utils.db_manager import BaseDBManager, RunDBManager - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars pytestmark = [pytest.mark.db_test] diff --git a/tests/utils/test_dot_renderer.py b/tests/utils/test_dot_renderer.py index d1521ca6e0f69..718d2d03b0832 100644 --- a/tests/utils/test_dot_renderer.py +++ b/tests/utils/test_dot_renderer.py @@ -29,9 +29,8 @@ from airflow.utils import dot_renderer, timezone from airflow.utils.state import State from airflow.utils.task_group import TaskGroup - -from dev.tests_common.test_utils.compat import BashOperator -from dev.tests_common.test_utils.db import clear_db_dags +from tests_common.test_utils.compat import BashOperator +from tests_common.test_utils.db import clear_db_dags START_DATE = timezone.utcnow() diff --git a/tests/utils/test_email.py b/tests/utils/test_email.py index b47dcbc87585a..ac90c20ef6a75 100644 --- a/tests/utils/test_email.py +++ b/tests/utils/test_email.py @@ -29,8 +29,7 @@ from airflow.configuration import conf from airflow.utils import email - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars EMAILS = ["test1@example.com", "test2@example.com"] diff --git a/tests/utils/test_file.py b/tests/utils/test_file.py index 9424b90a92cf4..a7ca4bd5575f7 100644 --- a/tests/utils/test_file.py +++ b/tests/utils/test_file.py @@ -26,9 +26,9 @@ from airflow.utils import file as file_utils from airflow.utils.file import correct_maybe_zipped, find_path_from_directory, open_maybe_zipped -from tests.models import TEST_DAGS_FOLDER +from tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.config import conf_vars +from tests.models import TEST_DAGS_FOLDER def might_contain_dag(file_path: str, zip_file: zipfile.ZipFile | None = None): diff --git a/tests/utils/test_helpers.py b/tests/utils/test_helpers.py index 0b0046b1ba082..ded59b999f8bf 100644 --- a/tests/utils/test_helpers.py +++ b/tests/utils/test_helpers.py @@ -37,9 +37,8 @@ validate_key, ) from airflow.utils.types import NOTSET - -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_dags, clear_db_runs if TYPE_CHECKING: from airflow.jobs.job import Job diff --git a/tests/utils/test_log_handlers.py b/tests/utils/test_log_handlers.py index 4be3564567ebd..c1135d8fe4a18 100644 --- a/tests/utils/test_log_handlers.py +++ b/tests/utils/test_log_handlers.py @@ -54,9 +54,8 @@ from airflow.utils.state import State, TaskInstanceState from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType - -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from 
dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.config import conf_vars if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/utils/test_net.py b/tests/utils/test_net.py index 60823911b8b03..50fdd207dec3f 100644 --- a/tests/utils/test_net.py +++ b/tests/utils/test_net.py @@ -24,8 +24,7 @@ from airflow.exceptions import AirflowConfigException from airflow.utils import net - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars def get_hostname(): diff --git a/tests/utils/test_serve_logs.py b/tests/utils/test_serve_logs.py index 8d41202be2545..0c6fb8d87591e 100644 --- a/tests/utils/test_serve_logs.py +++ b/tests/utils/test_serve_logs.py @@ -28,8 +28,7 @@ from airflow.utils import timezone from airflow.utils.jwt_signer import JWTSigner from airflow.utils.serve_logs import create_app - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars if TYPE_CHECKING: from flask.testing import FlaskClient diff --git a/tests/utils/test_sqlalchemy.py b/tests/utils/test_sqlalchemy.py index 42717a4fb045a..49e4db42788a6 100644 --- a/tests/utils/test_sqlalchemy.py +++ b/tests/utils/test_sqlalchemy.py @@ -42,8 +42,7 @@ ) from airflow.utils.state import State from airflow.utils.timezone import utcnow - -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/utils/test_state.py b/tests/utils/test_state.py index 6447477ef589b..12d857ee1999e 100644 --- a/tests/utils/test_state.py +++ b/tests/utils/test_state.py @@ -25,9 +25,9 @@ from airflow.utils.session import create_session from airflow.utils.state import DagRunState from airflow.utils.types import DagRunType -from tests.models import DEFAULT_DATE +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests.models import DEFAULT_DATE if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/utils/test_task_group.py b/tests/utils/test_task_group.py index 6ea0fcdc36e13..6b2185bf791fc 100644 --- a/tests/utils/test_task_group.py +++ b/tests/utils/test_task_group.py @@ -38,9 +38,9 @@ from airflow.operators.python import PythonOperator from airflow.utils.dag_edges import dag_edges from airflow.utils.task_group import TASKGROUP_ARGS_EXPECTED_TYPES, TaskGroup, task_group_to_dict -from tests.models import DEFAULT_DATE +from tests_common.test_utils.compat import BashOperator -from dev.tests_common.test_utils.compat import BashOperator +from tests.models import DEFAULT_DATE def make_task(name, type_="classic"): @@ -1415,7 +1415,7 @@ def test_task_group_edge_modifier_chain(): def test_mapped_task_group_id_prefix_task_id(): - from dev.tests_common.test_utils.mock_operators import MockOperator + from tests_common.test_utils.mock_operators import MockOperator with DAG(dag_id="d", schedule=None, start_date=DEFAULT_DATE) as dag: t1 = MockOperator.partial(task_id="t1").expand(arg1=[]) diff --git a/tests/utils/test_task_handler_with_custom_formatter.py b/tests/utils/test_task_handler_with_custom_formatter.py index aee646d858b15..785cb34eaa013 100644 --- a/tests/utils/test_task_handler_with_custom_formatter.py +++ 
b/tests/utils/test_task_handler_with_custom_formatter.py @@ -28,10 +28,9 @@ from airflow.utils.state import DagRunState from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType - -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_runs +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_runs if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/utils/test_types.py b/tests/utils/test_types.py index cb5849b20599a..d982730392076 100644 --- a/tests/utils/test_types.py +++ b/tests/utils/test_types.py @@ -25,9 +25,9 @@ from airflow.utils.session import create_session from airflow.utils.state import State from airflow.utils.types import DagRunType -from tests.models import DEFAULT_DATE +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests.models import DEFAULT_DATE if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/www/test_app.py b/tests/www/test_app.py index 673abd371dfb5..20dd5eb05e7e0 100644 --- a/tests/www/test_app.py +++ b/tests/www/test_app.py @@ -29,9 +29,8 @@ from airflow.exceptions import AirflowConfigException from airflow.www import app as application - -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.decorators import dont_initialize_flask_app_submodules pytestmark = [pytest.mark.db_test, pytest.mark.skip_if_database_isolation_mode] diff --git a/tests/www/test_utils.py b/tests/www/test_utils.py index f8f0ae983382e..1ee0415b822a4 100644 --- a/tests/www/test_utils.py +++ b/tests/www/test_utils.py @@ -44,8 +44,7 @@ wrapped_markdown, ) from airflow.www.widgets import AirflowDateTimePickerROWidget, BS3TextAreaROWidget, BS3TextFieldROWidget - -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/www/views/conftest.py b/tests/www/views/conftest.py index faa1cc23a8d36..e5bf5f947ac3b 100644 --- a/tests/www/views/conftest.py +++ b/tests/www/views/conftest.py @@ -27,11 +27,10 @@ from airflow import settings from airflow.models import DagBag from airflow.www.app import create_app - -from dev.tests_common.test_utils.api_connexion_utils import delete_user -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules -from dev.tests_common.test_utils.www import ( +from tests_common.test_utils.api_connexion_utils import delete_user +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.decorators import dont_initialize_flask_app_submodules +from tests_common.test_utils.www import ( client_with_login, client_without_login, client_without_login_as_admin, diff --git a/tests/www/views/test_anonymous_as_admin_role.py b/tests/www/views/test_anonymous_as_admin_role.py index 700f03f8e63c8..f881a35f95594 100644 --- a/tests/www/views/test_anonymous_as_admin_role.py +++ 
b/tests/www/views/test_anonymous_as_admin_role.py @@ -23,8 +23,7 @@ from airflow.models import Pool from airflow.utils.session import create_session - -from dev.tests_common.test_utils.www import check_content_in_response +from tests_common.test_utils.www import check_content_in_response pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_session.py b/tests/www/views/test_session.py index 009917606bda3..3bce5ebd9bf0d 100644 --- a/tests/www/views/test_session.py +++ b/tests/www/views/test_session.py @@ -22,9 +22,8 @@ from airflow.exceptions import AirflowConfigException from airflow.www import app - -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.decorators import dont_initialize_flask_app_submodules pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views.py b/tests/www/views/test_views.py index bae022316c73d..46d2ea7b194f9 100644 --- a/tests/www/views/test_views.py +++ b/tests/www/views/test_views.py @@ -42,11 +42,10 @@ get_task_stats_from_query, get_value_from_path, ) - -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.mock_plugins import mock_plugin_manager -from dev.tests_common.test_utils.www import check_content_in_response, check_content_not_in_response +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.mock_plugins import mock_plugin_manager +from tests_common.test_utils.www import check_content_in_response, check_content_not_in_response if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType @@ -330,8 +329,7 @@ def test_mark_task_instance_state(test_app): from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType from airflow.www.views import Airflow - - from dev.tests_common.test_utils.db import clear_db_runs + from tests_common.test_utils.db import clear_db_runs clear_db_runs() start_date = datetime(2020, 1, 1) @@ -424,8 +422,7 @@ def test_mark_task_group_state(test_app): from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType from airflow.www.views import Airflow - - from dev.tests_common.test_utils.db import clear_db_runs + from tests_common.test_utils.db import clear_db_runs clear_db_runs() start_date = datetime(2020, 1, 1) diff --git a/tests/www/views/test_views_acl.py b/tests/www/views/test_views_acl.py index 23fb26c18cf98..08e70b5727626 100644 --- a/tests/www/views/test_views_acl.py +++ b/tests/www/views/test_views_acl.py @@ -30,15 +30,15 @@ from airflow.utils.state import State from airflow.utils.types import DagRunType from airflow.www.views import FILTER_STATUS_COOKIE - -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.db import clear_db_runs -from dev.tests_common.test_utils.permissions import _resource_name -from dev.tests_common.test_utils.www import ( +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.db import clear_db_runs +from tests_common.test_utils.permissions import _resource_name +from tests_common.test_utils.www import ( check_content_in_response, check_content_not_in_response, client_with_login, ) + from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils 
import create_user_scope if AIRFLOW_V_3_0_PLUS: diff --git a/tests/www/views/test_views_base.py b/tests/www/views/test_views_base.py index 692f0301e2276..3254441c74da8 100644 --- a/tests/www/views/test_views_base.py +++ b/tests/www/views/test_views_base.py @@ -27,10 +27,9 @@ from airflow.utils import timezone from airflow.utils.session import create_session from airflow.www import app as application - -from dev.tests_common.test_utils.asserts import assert_queries_count -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.www import check_content_in_response, check_content_not_in_response +from tests_common.test_utils.asserts import assert_queries_count +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.www import check_content_in_response, check_content_not_in_response pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_cluster_activity.py b/tests/www/views/test_views_cluster_activity.py index 6b83de3c7682a..64063e069db69 100644 --- a/tests/www/views/test_views_cluster_activity.py +++ b/tests/www/views/test_views_cluster_activity.py @@ -26,8 +26,7 @@ from airflow.operators.empty import EmptyOperator from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.types import DagRunType - -from dev.tests_common.test_utils.db import clear_db_runs +from tests_common.test_utils.db import clear_db_runs pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_configuration.py b/tests/www/views/test_views_configuration.py index 90ab9b7faa933..4bd1f2fc4c3c8 100644 --- a/tests/www/views/test_views_configuration.py +++ b/tests/www/views/test_views_configuration.py @@ -21,9 +21,8 @@ import pytest from airflow.configuration import conf - -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.www import check_content_in_response, check_content_not_in_response +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.www import check_content_in_response, check_content_not_in_response pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_connection.py b/tests/www/views/test_views_connection.py index 66e571780eb96..7b0229c315b96 100644 --- a/tests/www/views/test_views_connection.py +++ b/tests/www/views/test_views_connection.py @@ -27,8 +27,7 @@ from airflow.models import Connection from airflow.utils.session import create_session from airflow.www.views import ConnectionFormWidget, ConnectionModelView - -from dev.tests_common.test_utils.www import ( +from tests_common.test_utils.www import ( _check_last_log, _check_last_log_masked_connection, check_content_in_response, diff --git a/tests/www/views/test_views_custom_user_views.py b/tests/www/views/test_views_custom_user_views.py index 49ecc2c4f30b2..3c00242308b75 100644 --- a/tests/www/views/test_views_custom_user_views.py +++ b/tests/www/views/test_views_custom_user_views.py @@ -27,12 +27,12 @@ from airflow import settings from airflow.security import permissions from airflow.www import app as application - -from dev.tests_common.test_utils.www import ( +from tests_common.test_utils.www import ( check_content_in_response, check_content_not_in_response, client_with_login, ) + from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import ( create_user, delete_role, diff --git a/tests/www/views/test_views_dagrun.py b/tests/www/views/test_views_dagrun.py index 3fb8eb02fb633..9a30e0a3511eb 100644 --- 
a/tests/www/views/test_views_dagrun.py +++ b/tests/www/views/test_views_dagrun.py @@ -24,19 +24,19 @@ from airflow.utils import timezone from airflow.utils.session import create_session from airflow.www.views import DagRunModelView -from tests.www.views.test_views_tasks import _get_appbuilder_pk_string - -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.www import ( +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.www import ( check_content_in_response, check_content_not_in_response, client_with_login, ) + from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import ( create_user, delete_roles, delete_user, ) +from tests.www.views.test_views_tasks import _get_appbuilder_pk_string if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/www/views/test_views_dataset.py b/tests/www/views/test_views_dataset.py index 899d64e9754ed..9d83715f3e210 100644 --- a/tests/www/views/test_views_dataset.py +++ b/tests/www/views/test_views_dataset.py @@ -24,9 +24,8 @@ from airflow.assets import Asset from airflow.models.asset import AssetEvent, AssetModel from airflow.operators.empty import EmptyOperator - -from dev.tests_common.test_utils.asserts import assert_queries_count -from dev.tests_common.test_utils.db import clear_db_assets +from tests_common.test_utils.asserts import assert_queries_count +from tests_common.test_utils.db import clear_db_assets pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_decorators.py b/tests/www/views/test_views_decorators.py index bdc402989fd0c..730fc55b4548b 100644 --- a/tests/www/views/test_views_decorators.py +++ b/tests/www/views/test_views_decorators.py @@ -23,10 +23,9 @@ from airflow.utils import timezone from airflow.utils.state import State from airflow.utils.types import DagRunType - -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.db import clear_db_runs, clear_db_variables -from dev.tests_common.test_utils.www import ( +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.db import clear_db_runs, clear_db_variables +from tests_common.test_utils.www import ( _check_last_log, _check_last_log_masked_variable, check_content_in_response, diff --git a/tests/www/views/test_views_extra_links.py b/tests/www/views/test_views_extra_links.py index fbc11bf51f337..bc06a09c8f95b 100644 --- a/tests/www/views/test_views_extra_links.py +++ b/tests/www/views/test_views_extra_links.py @@ -28,10 +28,13 @@ from airflow.utils import timezone from airflow.utils.state import DagRunState from airflow.utils.types import DagRunType - -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, BaseOperatorLink -from dev.tests_common.test_utils.db import clear_db_runs -from dev.tests_common.test_utils.mock_operators import AirflowLink, Dummy2TestOperator, Dummy3TestOperator +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, BaseOperatorLink +from tests_common.test_utils.db import clear_db_runs +from tests_common.test_utils.mock_operators import ( + AirflowLink, + EmptyExtraLinkTestOperator, + EmptyNoExtraLinkTestOperator, +) if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType @@ -115,12 +118,12 @@ def task_1(dag): @pytest.fixture(scope="module", autouse=True) def task_2(dag): - return Dummy2TestOperator(task_id="some_dummy_task_2", dag=dag) + return 
EmptyExtraLinkTestOperator(task_id="some_dummy_task_2", dag=dag) @pytest.fixture(scope="module", autouse=True) def task_3(dag): - return Dummy3TestOperator(task_id="some_dummy_task_3", dag=dag) + return EmptyNoExtraLinkTestOperator(task_id="some_dummy_task_3", dag=dag) @pytest.fixture(scope="module", autouse=True) diff --git a/tests/www/views/test_views_grid.py b/tests/www/views/test_views_grid.py index 4f05f2a13456b..d896520422493 100644 --- a/tests/www/views/test_views_grid.py +++ b/tests/www/views/test_views_grid.py @@ -35,10 +35,9 @@ from airflow.utils.task_group import TaskGroup from airflow.utils.types import DagRunType from airflow.www.views import dag_to_grid - -from dev.tests_common.test_utils.asserts import assert_queries_count -from dev.tests_common.test_utils.db import clear_db_assets, clear_db_runs -from dev.tests_common.test_utils.mock_operators import MockOperator +from tests_common.test_utils.asserts import assert_queries_count +from tests_common.test_utils.db import clear_db_assets, clear_db_runs +from tests_common.test_utils.mock_operators import MockOperator pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_home.py b/tests/www/views/test_views_home.py index 4de032c58176e..b02099a668008 100644 --- a/tests/www/views/test_views_home.py +++ b/tests/www/views/test_views_home.py @@ -27,14 +27,14 @@ from airflow.utils.state import State from airflow.www.utils import UIAlert from airflow.www.views import FILTER_LASTRUN_COOKIE, FILTER_STATUS_COOKIE, FILTER_TAGS_COOKIE - -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_import_errors, clear_db_serialized_dags -from dev.tests_common.test_utils.permissions import _resource_name -from dev.tests_common.test_utils.www import ( +from tests_common.test_utils.db import clear_db_dags, clear_db_import_errors, clear_db_serialized_dags +from tests_common.test_utils.permissions import _resource_name +from tests_common.test_utils.www import ( check_content_in_response, check_content_not_in_response, client_with_login, ) + from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_log.py b/tests/www/views/test_views_log.py index c025ebcbbe8ed..b0fdf6a01d10c 100644 --- a/tests/www/views/test_views_log.py +++ b/tests/www/views/test_views_log.py @@ -40,12 +40,11 @@ from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.types import DagRunType from airflow.www.app import create_app - -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs -from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules -from dev.tests_common.test_utils.www import client_with_login +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_dags, clear_db_runs +from tests_common.test_utils.decorators import dont_initialize_flask_app_submodules +from tests_common.test_utils.www import client_with_login if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/www/views/test_views_mount.py b/tests/www/views/test_views_mount.py index e58675439ec12..c20dc418d3a02 100644 --- a/tests/www/views/test_views_mount.py +++ b/tests/www/views/test_views_mount.py @@ -22,8 +22,7 @@ import werkzeug.wrappers 
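# Illustrative sketch only, not part of the patch: the hunks above and below
# all rewrite the same import prefix, ``dev.tests_common`` -> ``tests_common``.
# Assuming the post-move layout, the relocated ``conf_vars`` helper is used
# like this (test name and config key chosen for illustration):
from tests_common.test_utils.config import conf_vars

def test_with_overridden_config():
    # Keys are (section, option) tuples; previous values are restored on exit.
    with conf_vars({("webserver", "expose_config"): "True"}):
        ...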
from airflow.www.app import create_app - -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_paused.py b/tests/www/views/test_views_paused.py index 551cdd180e078..59eee1868b70e 100644 --- a/tests/www/views/test_views_paused.py +++ b/tests/www/views/test_views_paused.py @@ -19,8 +19,7 @@ import pytest from airflow.models.log import Log - -from dev.tests_common.test_utils.db import clear_db_dags +from tests_common.test_utils.db import clear_db_dags pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_pool.py b/tests/www/views/test_views_pool.py index 9b9ddfaeeb79e..7b7b20d57c9cf 100644 --- a/tests/www/views/test_views_pool.py +++ b/tests/www/views/test_views_pool.py @@ -23,8 +23,7 @@ from airflow.models import Pool from airflow.utils.session import create_session - -from dev.tests_common.test_utils.www import check_content_in_response, check_content_not_in_response +from tests_common.test_utils.www import check_content_in_response, check_content_not_in_response pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_rate_limit.py b/tests/www/views/test_views_rate_limit.py index d843c8ed54b09..eee9fed8dde00 100644 --- a/tests/www/views/test_views_rate_limit.py +++ b/tests/www/views/test_views_rate_limit.py @@ -20,10 +20,9 @@ import pytest from airflow.www.app import create_app - -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.decorators import dont_initialize_flask_app_submodules -from dev.tests_common.test_utils.www import client_with_login +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.decorators import dont_initialize_flask_app_submodules +from tests_common.test_utils.www import client_with_login pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_rendered.py b/tests/www/views/test_views_rendered.py index 6d96cdc4d625f..c23c300416a1f 100644 --- a/tests/www/views/test_views_rendered.py +++ b/tests/www/views/test_views_rendered.py @@ -34,15 +34,14 @@ from airflow.utils.session import create_session from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.types import DagRunType - -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, BashOperator -from dev.tests_common.test_utils.db import ( +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, BashOperator +from tests_common.test_utils.db import ( clear_db_dags, clear_db_runs, clear_rendered_ti_fields, initial_db_init, ) -from dev.tests_common.test_utils.www import check_content_in_response, check_content_not_in_response +from tests_common.test_utils.www import check_content_in_response, check_content_not_in_response if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType diff --git a/tests/www/views/test_views_robots.py b/tests/www/views/test_views_robots.py index 41f963b3bf218..9f93ef5791f6f 100644 --- a/tests/www/views/test_views_robots.py +++ b/tests/www/views/test_views_robots.py @@ -18,7 +18,7 @@ import pytest -from dev.tests_common.test_utils.config import conf_vars +from tests_common.test_utils.config import conf_vars pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_task_norun.py b/tests/www/views/test_views_task_norun.py index 2ff6ff9d7cd67..4cb195461fccc 100644 --- a/tests/www/views/test_views_task_norun.py +++ b/tests/www/views/test_views_task_norun.py @@ -22,7 +22,7 @@ 
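# Illustrative sketch only, not part of the patch: the ``clear_db_*`` helpers
# imported throughout these modules purge the corresponding tables so each
# test starts from a known-empty database. Class and method names below are
# hypothetical:
from tests_common.test_utils.db import clear_db_dags, clear_db_runs

class TestExample:
    def setup_method(self):
        # Remove leftover DagRun and DAG rows before the test body runs.
        clear_db_runs()
        clear_db_dags()

    def teardown_method(self):
        clear_db_runs()
        clear_db_dags()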
import pytest -from dev.tests_common.test_utils.db import clear_db_runs +from tests_common.test_utils.db import clear_db_runs pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_tasks.py b/tests/www/views/test_views_tasks.py index 4e9de5c00fc9c..b1a57ef290528 100644 --- a/tests/www/views/test_views_tasks.py +++ b/tests/www/views/test_views_tasks.py @@ -43,15 +43,15 @@ from airflow.utils.state import DagRunState, State from airflow.utils.types import DagRunType from airflow.www.views import TaskInstanceModelView, _safe_parse_datetime - -from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, BashOperator -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.db import clear_db_runs, clear_db_xcom -from dev.tests_common.test_utils.www import ( +from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS, BashOperator +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.db import clear_db_runs, clear_db_xcom +from tests_common.test_utils.www import ( check_content_in_response, check_content_not_in_response, client_with_login, ) + from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import ( create_user, delete_roles, diff --git a/tests/www/views/test_views_trigger_dag.py b/tests/www/views/test_views_trigger_dag.py index d2c3817ede6db..f7e4d62599ea1 100644 --- a/tests/www/views/test_views_trigger_dag.py +++ b/tests/www/views/test_views_trigger_dag.py @@ -32,9 +32,9 @@ from airflow.utils.json import WebEncoder from airflow.utils.session import create_session from airflow.utils.types import DagRunType +from tests_common.test_utils.config import conf_vars +from tests_common.test_utils.www import check_content_in_response -from dev.tests_common.test_utils.config import conf_vars -from dev.tests_common.test_utils.www import check_content_in_response from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_test_client pytestmark = pytest.mark.db_test diff --git a/tests/www/views/test_views_variable.py b/tests/www/views/test_views_variable.py index dae1f2e9b7315..2f1ec81fef1d6 100644 --- a/tests/www/views/test_views_variable.py +++ b/tests/www/views/test_views_variable.py @@ -25,13 +25,13 @@ from airflow.models import Variable from airflow.security import permissions from airflow.utils.session import create_session - -from dev.tests_common.test_utils.www import ( +from tests_common.test_utils.www import ( _check_last_log, check_content_in_response, check_content_not_in_response, client_with_login, ) + from providers.tests.fab.auth_manager.api_endpoints.api_connexion_utils import create_user pytestmark = pytest.mark.db_test diff --git a/dev/tests_common/__init__.py b/tests_common/__init__.py similarity index 100% rename from dev/tests_common/__init__.py rename to tests_common/__init__.py diff --git a/dev/tests_common/_internals/__init__.py b/tests_common/_internals/__init__.py similarity index 100% rename from dev/tests_common/_internals/__init__.py rename to tests_common/_internals/__init__.py diff --git a/dev/tests_common/_internals/capture_warnings.py b/tests_common/_internals/capture_warnings.py similarity index 99% rename from dev/tests_common/_internals/capture_warnings.py rename to tests_common/_internals/capture_warnings.py index 16b220989b89b..aaae4e54f0ce7 100644 --- a/dev/tests_common/_internals/capture_warnings.py +++ b/tests_common/_internals/capture_warnings.py @@ -179,7 +179,7 @@ def pytest_runtest_protocol(self, item: pytest.Item): 
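# Illustrative sketch only, not part of the patch: the plugin methods in this
# hunk rely on pytest's hookwrapper protocol. In a hookwrapper, code before
# ``yield`` runs ahead of the regular hook implementations and code after it
# runs once they finish. The plugin class here is hypothetical:
import pytest

class ExampleSessionPlugin:
    @pytest.hookimpl(hookwrapper=True)
    def pytest_sessionfinish(self, session, exitstatus):
        yield  # all other pytest_sessionfinish implementations run here
        print(f"session finished with exit status {exitstatus}")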
@pytest.hookimpl(hookwrapper=True, trylast=True) def pytest_sessionfinish(self, session: pytest.Session, exitstatus: int): - """Save warning captures in the session finish on xdist worker node""" + """Save warning captures in the session finish on xdist worker node.""" with CapturedWarning.capture_warnings("config", self.root_path, None) as records: yield self.add_captured_warnings(records) @@ -206,7 +206,7 @@ def pytest_testnodedown(self, node, error): @staticmethod def sorted_groupby(it, grouping_key: Callable): - """Helper for sort and group by.""" + """Sort and group items by the grouping_key.""" for group, grouped_data in itertools.groupby(sorted(it, key=grouping_key), key=grouping_key): yield group, list(grouped_data) diff --git a/dev/tests_common/_internals/forbidden_warnings.py b/tests_common/_internals/forbidden_warnings.py similarity index 99% rename from dev/tests_common/_internals/forbidden_warnings.py rename to tests_common/_internals/forbidden_warnings.py index ce24878510667..0b0a11262b3b6 100644 --- a/dev/tests_common/_internals/forbidden_warnings.py +++ b/tests_common/_internals/forbidden_warnings.py @@ -81,7 +81,7 @@ def pytest_itemcollected(self, item: pytest.Item): @pytest.hookimpl(hookwrapper=True, trylast=True) def pytest_sessionfinish(self, session: pytest.Session, exitstatus: int): - """Save set of test node ids in the session finish on xdist worker node""" + """Save set of test node ids in the session finish on xdist worker node.""" yield if self.is_worker_node and self.detected_cases and hasattr(self.config, "workeroutput"): self.config.workeroutput[self.node_key] = frozenset(self.detected_cases) diff --git a/dev/tests_common/pyproject.toml b/tests_common/pyproject.toml similarity index 98% rename from dev/tests_common/pyproject.toml rename to tests_common/pyproject.toml index 092ad7c7c5f5b..7cb353285ddd7 100644 --- a/dev/tests_common/pyproject.toml +++ b/tests_common/pyproject.toml @@ -35,4 +35,4 @@ disable = true include = ["**/*.py"] [tool.hatch.build.targets.wheel.sources] -"" = "dev/tests_common" +"" = "tests_common" diff --git a/dev/tests_common/pytest_plugin.py b/tests_common/pytest_plugin.py similarity index 97% rename from dev/tests_common/pytest_plugin.py rename to tests_common/pytest_plugin.py index 5694fe1b3403f..60638d8a5a756 100644 --- a/dev/tests_common/pytest_plugin.py +++ b/tests_common/pytest_plugin.py @@ -34,8 +34,8 @@ if TYPE_CHECKING: from itsdangerous import URLSafeSerializer - from dev.tests_common._internals.capture_warnings import CaptureWarningsPlugin # noqa: F401 - from dev.tests_common._internals.forbidden_warnings import ForbiddenWarningsPlugin # noqa: F401 + from tests_common._internals.capture_warnings import CaptureWarningsPlugin # noqa: F401 + from tests_common._internals.forbidden_warnings import ForbiddenWarningsPlugin # noqa: F401 # https://docs.pytest.org/en/stable/reference/reference.html#stash capture_warnings_key = pytest.StashKey["CaptureWarningsPlugin"]() @@ -133,8 +133,7 @@ @pytest.fixture def reset_db(): - """Resets Airflow db.""" - + """Reset Airflow db.""" from airflow.utils import db db.resetdb() @@ -145,7 +144,7 @@ def reset_db(): @pytest.fixture(autouse=True) def trace_sql(request): - from dev.tests_common.test_utils.perf.perf_kit.sqlalchemy import ( # isort: skip + from tests_common.test_utils.perf.perf_kit.sqlalchemy import ( # isort: skip count_queries, trace_queries, ) @@ -325,10 +324,10 @@ def pytest_addoption(parser: pytest.Parser): @pytest.fixture(autouse=True, scope="session") def 
initialize_airflow_tests(request): - """Helper that setups Airflow testing environment.""" + """Set up Airflow testing environment.""" print(" AIRFLOW ".center(60, "=")) - from dev.tests_common.test_utils.db import initial_db_init + from tests_common.test_utils.db import initial_db_init # Setup test environment for breeze home = os.path.expanduser("~") @@ -340,7 +339,7 @@ def initialize_airflow_tests(request): lock_file = os.path.join(airflow_home, ".airflow_db_initialised") if not skip_db_tests: if request.config.option.db_init: - from dev.tests_common.test_utils.db import initial_db_init + from tests_common.test_utils.db import initial_db_init print("Initializing the DB - forced with --with-db-init switch.") initial_db_init() @@ -431,7 +430,7 @@ def pytest_configure(config: pytest.Config) -> None: forbidden_warnings: list[str] | None = config.getini("forbidden_warnings") if not config.option.disable_forbidden_warnings and forbidden_warnings: - from dev.tests_common._internals.forbidden_warnings import ForbiddenWarningsPlugin + from tests_common._internals.forbidden_warnings import ForbiddenWarningsPlugin forbidden_warnings_plugin = ForbiddenWarningsPlugin( config=config, @@ -441,7 +440,7 @@ def pytest_configure(config: pytest.Config) -> None: config.stash[forbidden_warnings_key] = forbidden_warnings_plugin if not config.option.disable_capture_warnings: - from dev.tests_common._internals.capture_warnings import CaptureWarningsPlugin + from tests_common._internals.capture_warnings import CaptureWarningsPlugin capture_warnings_plugin = CaptureWarningsPlugin( config=config, output_path=config.getoption("warning_output_path", default=None) @@ -678,7 +677,8 @@ def pytest_runtest_setup(item): @pytest.fixture def frozen_sleep(monkeypatch): - """Use time-machine to "stub" sleep. + """ + Use time-machine to "stub" sleep. This means the ``sleep()`` takes no time, but ``datetime.now()`` appears to move forwards. @@ -717,7 +717,8 @@ def fake_sleep(seconds): @pytest.fixture def dag_maker(request): - """Fixture to help create DAG, DagModel, and SerializedDAG automatically. + """ + Fixture to help create DAG, DagModel, and SerializedDAG automatically. You have to use the dag_maker as a context manager and it takes the same argument as DAG:: @@ -824,8 +825,7 @@ def create_dagrun(self, **kwargs): from airflow.utils import timezone from airflow.utils.state import State from airflow.utils.types import DagRunType - - from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS + from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType @@ -922,8 +922,7 @@ def cleanup(self): from airflow.models.serialized_dag import SerializedDagModel from airflow.models.taskmap import TaskMap from airflow.utils.retries import run_with_db_retries - - from dev.tests_common.test_utils.compat import AssetEvent + from tests_common.test_utils.compat import AssetEvent for attempt in run_with_db_retries(logger=self.log): with attempt: @@ -970,7 +969,8 @@ def cleanup(self): @pytest.fixture def create_dummy_dag(dag_maker): - """Create a `DAG` with a single `EmptyOperator` task. + """ + Create a `DAG` with a single `EmptyOperator` task. DagRun and DagModel is also created. 
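# Illustrative sketch only, not part of the patch: how the ``dag_maker``
# fixture documented above is used in a test. DAG and task ids are
# hypothetical; the fixture takes the same arguments as ``DAG`` and must be
# used as a context manager, after which ``create_dagrun`` can be called:
def test_example(dag_maker):
    from airflow.operators.empty import EmptyOperator

    with dag_maker(dag_id="example_dag", schedule=None):
        EmptyOperator(task_id="only_task")
    dag_run = dag_maker.create_dagrun()
    assert dag_run.dag_id == "example_dag"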
@@ -1006,7 +1006,7 @@ def create_dag( **kwargs, ): op_kwargs = {} - from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS + from tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS if AIRFLOW_V_2_9_PLUS: op_kwargs["task_display_name"] = task_display_name @@ -1038,7 +1038,8 @@ def create_dag( @pytest.fixture def create_task_instance(dag_maker, create_dummy_dag): - """Create a TaskInstance, and associated DB rows (DagRun, DagModel, etc). + """ + Create a TaskInstance, and associated DB rows (DagRun, DagModel, etc). Uses ``create_dummy_dag`` to create the dag structure. """ @@ -1068,7 +1069,7 @@ def maker( map_index=-1, **kwargs, ) -> TaskInstance: - from dev.tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS + from tests_common.test_utils.compat import AIRFLOW_V_3_0_PLUS if AIRFLOW_V_3_0_PLUS: from airflow.utils.types import DagRunTriggeredByType @@ -1079,7 +1080,7 @@ def maker( execution_date = timezone.utcnow() with dag_maker(dag_id, **kwargs): op_kwargs = {} - from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS + from tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS if AIRFLOW_V_2_9_PLUS: op_kwargs["task_display_name"] = task_display_name @@ -1243,7 +1244,7 @@ def reset_logging_config(): def suppress_info_logs_for_dag_and_fab(): import logging - from dev.tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS + from tests_common.test_utils.compat import AIRFLOW_V_2_9_PLUS dag_logger = logging.getLogger("airflow.models.dag") dag_logger.setLevel(logging.WARNING) @@ -1259,7 +1260,7 @@ def suppress_info_logs_for_dag_and_fab(): @pytest.fixture(scope="module", autouse=True) def _clear_db(request): """Clear DB before each test module run.""" - from dev.tests_common.test_utils.db import clear_all, initial_db_init + from tests_common.test_utils.db import clear_all, initial_db_init if not request.config.option.db_cleanup: return @@ -1396,7 +1397,7 @@ def hook_lineage_collector(): @pytest.fixture def clean_dags_and_dagruns(): """Fixture that cleans the database before and after every test.""" - from dev.tests_common.test_utils.db import clear_db_dags, clear_db_runs + from tests_common.test_utils.db import clear_db_dags, clear_db_runs clear_db_runs() clear_db_dags() @@ -1407,7 +1408,7 @@ def clean_dags_and_dagruns(): @pytest.fixture(scope="session") def app(): - from dev.tests_common.test_utils.config import conf_vars + from tests_common.test_utils.config import conf_vars with conf_vars({("fab", "auth_rate_limited"): "False"}): from airflow.www import app diff --git a/dev/tests_common/test_utils/README.md b/tests_common/test_utils/README.md similarity index 100% rename from dev/tests_common/test_utils/README.md rename to tests_common/test_utils/README.md diff --git a/dev/tests_common/test_utils/__init__.py b/tests_common/test_utils/__init__.py similarity index 94% rename from dev/tests_common/test_utils/__init__.py rename to tests_common/test_utils/__init__.py index e440178fae6c7..c27a6b55209c1 100644 --- a/dev/tests_common/test_utils/__init__.py +++ b/tests_common/test_utils/__init__.py @@ -19,4 +19,4 @@ from pathlib import Path -AIRFLOW_MAIN_FOLDER = Path(__file__).parents[3] +AIRFLOW_MAIN_FOLDER = Path(__file__).parents[2] diff --git a/dev/tests_common/test_utils/api_connexion_utils.py b/tests_common/test_utils/api_connexion_utils.py similarity index 91% rename from dev/tests_common/test_utils/api_connexion_utils.py rename to tests_common/test_utils/api_connexion_utils.py index 2d273af2e6878..23a273d93557f 100644 --- 
a/dev/tests_common/test_utils/api_connexion_utils.py +++ b/tests_common/test_utils/api_connexion_utils.py @@ -20,8 +20,7 @@ from typing import TYPE_CHECKING from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP - -from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error +from tests_common.test_utils.compat import ignore_provider_compatibility_error with ignore_provider_compatibility_error("2.9.0+", __file__): from airflow.providers.fab.auth_manager.security_manager.override import EXISTING_ROLES @@ -32,9 +31,7 @@ @contextmanager def create_test_client(app, user_name, role_name, permissions): - """ - Helper function to create a client with a temporary user which will be deleted once done - """ + """Create a client with a temporary user which will be deleted once done.""" client = app.test_client() with create_user_scope(app, username=user_name, role_name=role_name, permissions=permissions) as _: resp = client.post("/login/", data={"username": user_name, "password": user_name}) @@ -45,9 +42,10 @@ def create_test_client(app, user_name, role_name, permissions): @contextmanager def create_user_scope(app, username, **kwargs): """ - Helper function designed to be used with pytest fixture mainly. + Create a user scope, mainly for use with pytest fixtures. + It will create a user and provide it for the fixture via YIELD (generator) - then will tidy up once test is complete + then will tidy up once the test is complete. """ test_user = create_user(app, username, **kwargs) diff --git a/dev/tests_common/test_utils/asserts.py b/tests_common/test_utils/asserts.py similarity index 95% rename from dev/tests_common/test_utils/asserts.py rename to tests_common/test_utils/asserts.py index 56bf8cc1fee57..59ca9bad968ea 100644 --- a/dev/tests_common/test_utils/asserts.py +++ b/tests_common/test_utils/asserts.py @@ -46,6 +46,8 @@ def _trim(s): class QueriesTraceRecord(NamedTuple): + """QueriesTraceRecord holds information about the query executed in the context.""" + module: str name: str lineno: int | None @@ -63,6 +65,8 @@ def __str__(self): class QueriesTraceInfo(NamedTuple): + """QueriesTraceInfo holds information about the queries executed in the context.""" + traces: tuple[QueriesTraceRecord, ...] @classmethod @@ -144,14 +148,15 @@ def assert_queries_count( session: Session | None = None, ): """ - Asserts that the number of queries is as expected with the margin applied + Assert that the number of queries is as expected with the margin applied. + The margin is helpful in case of complex cases where we do not want to change it every time we changed queries, but we want to catch cases where we spin out of control :param expected_count: expected number of queries :param message_fmt: message printed optionally if the number is exceeded :param margin: margin to add to expected number of calls :param stacklevel: limits the output stack trace to that numbers of frame - :param stacklevel_from_module: Filter stack trace from specific module + :param stacklevel_from_module: Filter stack trace from specific module. 
""" with count_queries( stacklevel=stacklevel, stacklevel_from_module=stacklevel_from_module, session=session diff --git a/dev/tests_common/test_utils/azure_system_helpers.py b/tests_common/test_utils/azure_system_helpers.py similarity index 93% rename from dev/tests_common/test_utils/azure_system_helpers.py rename to tests_common/test_utils/azure_system_helpers.py index b1a46617c082f..057124dea1f29 100644 --- a/dev/tests_common/test_utils/azure_system_helpers.py +++ b/tests_common/test_utils/azure_system_helpers.py @@ -28,9 +28,8 @@ from airflow.models import Connection from airflow.providers.microsoft.azure.hooks.fileshare import AzureFileShareHook from airflow.utils.process_utils import patch_environ - -from dev.tests_common.test_utils import AIRFLOW_MAIN_FOLDER -from dev.tests_common.test_utils.system_tests_class import SystemTest +from tests_common.test_utils import AIRFLOW_MAIN_FOLDER +from tests_common.test_utils.system_tests_class import SystemTest AZURE_DAG_FOLDER = os.path.join( AIRFLOW_MAIN_FOLDER, "airflow", "providers", "microsoft", "azure", "example_dags" @@ -44,7 +43,7 @@ @contextmanager def provide_wasb_default_connection(key_file_path: str): """ - Context manager to provide a temporary value for wasb_default connection + Context manager to provide a temporary value for wasb_default connection. :param key_file_path: Path to file with wasb_default credentials .json file. """ @@ -67,7 +66,8 @@ def provide_wasb_default_connection(key_file_path: str): @contextmanager def provide_azure_data_lake_default_connection(key_file_path: str): """ - Context manager to provide a temporary value for azure_data_lake_default connection + Provide a temporary value for azure_data_lake_default connection. + :param key_file_path: Path to file with azure_data_lake_default credentials .json file. """ required_fields = {"login", "password", "extra"} @@ -106,6 +106,8 @@ def provide_azure_fileshare(share_name: str, azure_fileshare_conn_id: str, file_ @pytest.mark.system("azure") class AzureSystemTest(SystemTest): + """Base class for Azure system tests.""" + @classmethod def create_share(cls, share_name: str, azure_fileshare_conn_id: str): hook = AzureFileShareHook(azure_fileshare_conn_id=azure_fileshare_conn_id) @@ -138,9 +140,7 @@ def upload_file_from_string( @classmethod def prepare_share(cls, share_name: str, azure_fileshare_conn_id: str, file_name: str, directory: str): - """ - Create share with a file in given directory. If directory is None, file is in root dir. - """ + """Create share with a file in given directory. If directory is None, file is in root dir.""" hook = AzureFileShareHook( azure_fileshare_conn_id=azure_fileshare_conn_id, share_name=share_name, diff --git a/dev/tests_common/test_utils/compat.py b/tests_common/test_utils/compat.py similarity index 99% rename from dev/tests_common/test_utils/compat.py rename to tests_common/test_utils/compat.py index 67a111350daff..bc04f798e027e 100644 --- a/dev/tests_common/test_utils/compat.py +++ b/tests_common/test_utils/compat.py @@ -116,7 +116,7 @@ def connection_to_dict( connection: Connection, *, prune_empty: bool = False, validate: bool = True ) -> dict[str, Any]: """ - Convert Connection to json-serializable dictionary (compatibility code for Airflow 2.7 tests) + Convert Connection to json-serializable dictionary (compatibility code for Airflow 2.7 tests). :param connection: connection to convert to dict :param prune_empty: Whether or not remove empty values. 
diff --git a/dev/tests_common/test_utils/config.py b/tests_common/test_utils/config.py similarity index 100% rename from dev/tests_common/test_utils/config.py rename to tests_common/test_utils/config.py diff --git a/dev/tests_common/test_utils/db.py b/tests_common/test_utils/db.py similarity index 97% rename from dev/tests_common/test_utils/db.py rename to tests_common/test_utils/db.py index 4b2dede05ead7..bf92c3a2582d8 100644 --- a/dev/tests_common/test_utils/db.py +++ b/tests_common/test_utils/db.py @@ -44,8 +44,7 @@ from airflow.security.permissions import RESOURCE_DAG_PREFIX from airflow.utils.db import add_default_pool_if_not_exists, create_default_connections, reflect_tables from airflow.utils.session import create_session - -from dev.tests_common.test_utils.compat import ( +from tests_common.test_utils.compat import ( AIRFLOW_V_2_10_PLUS, AssetDagRunQueue, AssetEvent, @@ -63,8 +62,7 @@ def initial_db_init(): from airflow.utils import db from airflow.www.extensions.init_appbuilder import init_appbuilder from airflow.www.extensions.init_auth_manager import get_auth_manager - - from dev.tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS + from tests_common.test_utils.compat import AIRFLOW_V_2_8_PLUS db.resetdb() db.bootstrap_dagbag() @@ -106,7 +104,7 @@ def clear_db_assets(): session.query(DagScheduleAssetReference).delete() session.query(TaskOutletAssetReference).delete() if AIRFLOW_V_2_10_PLUS: - from dev.tests_common.test_utils.compat import AssetAliasModel + from tests_common.test_utils.compat import AssetAliasModel session.query(AssetAliasModel).delete() diff --git a/dev/tests_common/test_utils/decorators.py b/tests_common/test_utils/decorators.py similarity index 100% rename from dev/tests_common/test_utils/decorators.py rename to tests_common/test_utils/decorators.py diff --git a/dev/tests_common/test_utils/fake_datetime.py b/tests_common/test_utils/fake_datetime.py similarity index 92% rename from dev/tests_common/test_utils/fake_datetime.py rename to tests_common/test_utils/fake_datetime.py index 6af41e15efc0c..3aba78e767077 100644 --- a/dev/tests_common/test_utils/fake_datetime.py +++ b/tests_common/test_utils/fake_datetime.py @@ -21,9 +21,7 @@ class FakeDatetime(datetime): - """ - A fake replacement for datetime that can be mocked for testing. 
- """ + """A fake replacement for datetime that can be mocked for testing.""" def __new__(cls, *args, **kwargs): return datetime.__new__(datetime, *args, **kwargs) diff --git a/dev/tests_common/test_utils/gcp_system_helpers.py b/tests_common/test_utils/gcp_system_helpers.py similarity index 95% rename from dev/tests_common/test_utils/gcp_system_helpers.py rename to tests_common/test_utils/gcp_system_helpers.py index e17679bd8eb56..82b16da8490b7 100644 --- a/dev/tests_common/test_utils/gcp_system_helpers.py +++ b/tests_common/test_utils/gcp_system_helpers.py @@ -30,10 +30,10 @@ import airflow.providers.google from airflow.providers.google.cloud.utils.credentials_provider import provide_gcp_conn_and_credentials +from tests_common.test_utils import AIRFLOW_MAIN_FOLDER +from tests_common.test_utils.logging_command_executor import CommandExecutor +from tests_common.test_utils.system_tests_class import SystemTest -from dev.tests_common.test_utils import AIRFLOW_MAIN_FOLDER -from dev.tests_common.test_utils.logging_command_executor import CommandExecutor -from dev.tests_common.test_utils.system_tests_class import SystemTest from providers.tests.google.cloud.utils.gcp_authenticator import GCP_GCS_KEY, GCP_SECRET_MANAGER_KEY GCP_DIR = Path(airflow.providers.google.__file__).parent @@ -49,7 +49,7 @@ def resolve_full_gcp_key_path(key: str) -> str: """ - Returns path full path to provided GCP key. + Return path full path to provided GCP key. :param key: Name of the GCP key, for example ``my_service.json`` :returns: Full path to the key @@ -66,7 +66,7 @@ def provide_gcp_context( project_id: str | None = None, ): """ - Context manager that provides: + Provide context manager for GCP. - GCP credentials for application supporting `Application Default Credentials (ADC) strategy `__. @@ -115,6 +115,8 @@ def provide_gcs_bucket(bucket_name: str): @pytest.mark.system("google") class GoogleSystemTest(SystemTest): + """Base class for Google system tests.""" + @staticmethod def execute_cmd(*args, **kwargs): executor = CommandExecutor() @@ -132,10 +134,7 @@ def _service_key(): def execute_with_ctx( cls, cmd: list[str], key: str = GCP_GCS_KEY, project_id=None, scopes=None, silent: bool = False ): - """ - Executes command with context created by provide_gcp_context and activated - service key. - """ + """Execute command with context created by provide_gcp_context and activated service key.""" current_project_id = project_id or cls._project_id() with provide_gcp_context(key, project_id=current_project_id, scopes=scopes): cls.execute_cmd(cmd=cmd, silent=silent) diff --git a/dev/tests_common/test_utils/get_all_tests.py b/tests_common/test_utils/get_all_tests.py similarity index 92% rename from dev/tests_common/test_utils/get_all_tests.py rename to tests_common/test_utils/get_all_tests.py index 5081243c9e93a..000f2e63b34bd 100644 --- a/dev/tests_common/test_utils/get_all_tests.py +++ b/tests_common/test_utils/get_all_tests.py @@ -16,9 +16,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -Gets all tests cases from xunit file. -""" +"""Gets all tests cases from xunit file.""" from __future__ import annotations @@ -28,7 +26,7 @@ def last_replace(s, old, new, number_of_occurrences): """ - Replaces last n occurrences of the old string with the new one within the string provided + Replace last n occurrences of the old string with the new one within the string provided. 
     :param s: string to replace occurrences with
     :param old: old string
@@ -42,7 +40,7 @@ def last_replace(s, old, new, number_of_occurrences):

 def print_all_cases(xunit_test_file_path):
     """
-    Prints all test cases read from the xunit test file
+    Print all test cases read from the xunit test file.

     :param xunit_test_file_path: path of the xunit file
     :return: None
diff --git a/dev/tests_common/test_utils/hdfs_utils.py b/tests_common/test_utils/hdfs_utils.py
similarity index 95%
rename from dev/tests_common/test_utils/hdfs_utils.py
rename to tests_common/test_utils/hdfs_utils.py
index f429c9ab0cf45..26eb3a78a47de 100644
--- a/dev/tests_common/test_utils/hdfs_utils.py
+++ b/tests_common/test_utils/hdfs_utils.py
@@ -19,6 +19,8 @@

 class FakeWebHDFSHook:
+    """Fake WebHDFSHook for testing purposes."""
+
     def __init__(self, conn_id):
         self.conn_id = conn_id

diff --git a/dev/tests_common/test_utils/logging_command_executor.py b/tests_common/test_utils/logging_command_executor.py
similarity index 97%
rename from dev/tests_common/test_utils/logging_command_executor.py
rename to tests_common/test_utils/logging_command_executor.py
index 1773cda30d6f6..d41a3ea1ad9dc 100644
--- a/dev/tests_common/test_utils/logging_command_executor.py
+++ b/tests_common/test_utils/logging_command_executor.py
@@ -25,10 +25,12 @@

 class CommandExecutionError(Exception):
-    """Raise in case of error during command execution"""
+    """Raised in case of error during command execution."""

 class LoggingCommandExecutor(LoggingMixin):
+    """Logging command executor."""
+
     def execute_cmd(self, cmd, silent=False, cwd=None, env=None):
         if silent:
             self.log.info("Executing in silent mode: '%s'", " ".join(shlex.quote(c) for c in cmd))
@@ -70,6 +72,8 @@ def check_output(self, cmd):

 class CommandExecutor(LoggingCommandExecutor):
     """
+    Command executor.
+
     Due to 'LoggingCommandExecutor' class just returns the status code of command execution
     ('execute_cmd' method) and continues to perform code with possible errors, separate
     inherited 'CommandExecutor' class was created to use it if you need to break code performing
diff --git a/dev/tests_common/test_utils/mapping.py b/tests_common/test_utils/mapping.py
similarity index 100%
rename from dev/tests_common/test_utils/mapping.py
rename to tests_common/test_utils/mapping.py
diff --git a/dev/tests_common/test_utils/mock_executor.py b/tests_common/test_utils/mock_executor.py
similarity index 97%
rename from dev/tests_common/test_utils/mock_executor.py
rename to tests_common/test_utils/mock_executor.py
index 5080290500c9c..506c0447589ac 100644
--- a/dev/tests_common/test_utils/mock_executor.py
+++ b/tests_common/test_utils/mock_executor.py
@@ -28,9 +28,7 @@

 class MockExecutor(BaseExecutor):
-    """
-    TestExecutor is used for unit testing purposes.
-    """
+    """MockExecutor is used for unit testing purposes."""

     supports_pickling = False
     mock_module_path = "mock.executor.path"
@@ -98,6 +96,8 @@ def change_state(self, key, state, info=None):

     def mock_task_fail(self, dag_id, task_id, run_id: str, try_number=1):
         """
+        Mock for test failures.
+
         Set the mock outcome of running this particular task instance to FAILED.
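Before moving on to the operator mocks, a minimal sketch of how a test might drive the MockExecutor shown above. The dag id, task id, and run id are hypothetical, and the no-argument construction is an assumption since the class body is only partially visible in this hunk; only `mock_task_fail`'s signature is taken from the diff itself.

    from tests_common.test_utils.mock_executor import MockExecutor

    executor = MockExecutor()  # assumed no-arg construction

    # Pre-program the outcome: when the executor processes this task instance,
    # it reports FAILED instead of its default SUCCESS (per mock_task_fail above).
    executor.mock_task_fail("example_dag", "task1", run_id="test_run")

    # A scheduler test would then inject `executor` into the job under test and
    # assert that the failure path (retries, callbacks, state transitions) is handled.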
diff --git a/dev/tests_common/test_utils/mock_operators.py b/tests_common/test_utils/mock_operators.py similarity index 84% rename from dev/tests_common/test_utils/mock_operators.py rename to tests_common/test_utils/mock_operators.py index 0df0afec824c3..7cc807a3a485b 100644 --- a/dev/tests_common/test_utils/mock_operators.py +++ b/tests_common/test_utils/mock_operators.py @@ -23,8 +23,7 @@ from airflow.models.baseoperator import BaseOperator from airflow.models.xcom import XCom - -from dev.tests_common.test_utils.compat import BaseOperatorLink +from tests_common.test_utils.compat import BaseOperatorLink if TYPE_CHECKING: import jinja2 @@ -47,12 +46,16 @@ def execute(self, context: Context): class NestedFields: + """Nested fields for testing purposes.""" + def __init__(self, field_1, field_2): self.field_1 = field_1 self.field_2 = field_2 class MockOperatorWithNestedFields(BaseOperator): + """Operator with nested fields for testing purposes.""" + template_fields: Sequence[str] = ("arg1", "arg2") def __init__(self, arg1: str = "", arg2: NestedFields | None = None, **kwargs): @@ -85,9 +88,7 @@ def execute(self, context: Context): class AirflowLink(BaseOperatorLink): - """ - Operator Link for Apache Airflow Website - """ + """Operator Link for Apache Airflow Website.""" name = "airflow" @@ -95,19 +96,23 @@ def get_link(self, operator, *, ti_key): return "https://airflow.apache.org" -class Dummy2TestOperator(BaseOperator): +class EmptyExtraLinkTestOperator(BaseOperator): """ + Empty test operator with extra link. + Example of an Operator that has an extra operator link - and will be overridden by the one defined in tests/plugins/test_plugin.py + and will be overridden by the one defined in tests/plugins/test_plugin.py. """ operator_extra_links = (AirflowLink(),) -class Dummy3TestOperator(BaseOperator): +class EmptyNoExtraLinkTestOperator(BaseOperator): """ + Empty test operator without extra operator link. + Example of an operator that has no extra Operator link. - An operator link would be added to this operator via Airflow plugin + An operator link would be added to this operator via Airflow plugin. 
""" operator_extra_links = () @@ -115,6 +120,8 @@ class Dummy3TestOperator(BaseOperator): @attr.s(auto_attribs=True) class CustomBaseIndexOpLink(BaseOperatorLink): + """Custom Operator Link for Google BigQuery Console.""" + index: int = attr.ib() @property @@ -134,6 +141,8 @@ def get_link(self, operator, *, ti_key): class CustomOpLink(BaseOperatorLink): + """Custom Operator with Link for Google Custom Search.""" + name = "Google Custom" def get_link(self, operator, *, ti_key): @@ -146,14 +155,14 @@ def get_link(self, operator, *, ti_key): class CustomOperator(BaseOperator): + """Custom Operator for testing purposes.""" + template_fields = ["bash_command"] custom_operator_name = "@custom" @property def operator_extra_links(self): - """ - Return operator extra links - """ + """Return operator extra links.""" if isinstance(self.bash_command, str) or self.bash_command is None: return (CustomOpLink(),) return (CustomBaseIndexOpLink(i) for i, _ in enumerate(self.bash_command)) @@ -168,33 +177,27 @@ def execute(self, context: Context): class GoogleLink(BaseOperatorLink): - """ - Operator Link for Apache Airflow Website for Google - """ + """Operator Link for Apache Airflow Website for Google.""" name = "google" - operators = [Dummy3TestOperator, CustomOperator] + operators = [EmptyNoExtraLinkTestOperator, CustomOperator] def get_link(self, operator, *, ti_key): return "https://www.google.com" class AirflowLink2(BaseOperatorLink): - """ - Operator Link for Apache Airflow Website for 1.10.5 - """ + """Operator Link for Apache Airflow Website for 1.10.5.""" name = "airflow" - operators = [Dummy2TestOperator, Dummy3TestOperator] + operators = [EmptyExtraLinkTestOperator, EmptyNoExtraLinkTestOperator] def get_link(self, operator, *, ti_key): return "https://airflow.apache.org/1.10.5/" class GithubLink(BaseOperatorLink): - """ - Operator Link for Apache Airflow GitHub - """ + """Operator Link for Apache Airflow GitHub.""" name = "github" @@ -203,6 +206,8 @@ def get_link(self, operator, *, ti_key): class DeprecatedOperator(BaseOperator): + """Deprecated Operator for testing purposes.""" + def __init__(self, **kwargs): warnings.warn("This operator is deprecated.", DeprecationWarning, stacklevel=2) super().__init__(**kwargs) diff --git a/dev/tests_common/test_utils/mock_plugins.py b/tests_common/test_utils/mock_plugins.py similarity index 100% rename from dev/tests_common/test_utils/mock_plugins.py rename to tests_common/test_utils/mock_plugins.py diff --git a/dev/tests_common/test_utils/mock_security_manager.py b/tests_common/test_utils/mock_security_manager.py similarity index 89% rename from dev/tests_common/test_utils/mock_security_manager.py rename to tests_common/test_utils/mock_security_manager.py index 6b9f45e3d8410..ee0f49372afa9 100644 --- a/dev/tests_common/test_utils/mock_security_manager.py +++ b/tests_common/test_utils/mock_security_manager.py @@ -16,13 +16,15 @@ # under the License. 
 from __future__ import annotations

-from dev.tests_common.test_utils.compat import ignore_provider_compatibility_error
+from tests_common.test_utils.compat import ignore_provider_compatibility_error

 with ignore_provider_compatibility_error("2.9.0", __file__):
     from airflow.providers.fab.auth_manager.security_manager.override import FabAirflowSecurityManagerOverride

 class MockSecurityManager(FabAirflowSecurityManagerOverride):
+    """Mock Security Manager for testing purposes."""
+
     VIEWER_VMS = {
         "Airflow",
     }
diff --git a/dev/tests_common/test_utils/operators/__init__.py b/tests_common/test_utils/operators/__init__.py
similarity index 100%
rename from dev/tests_common/test_utils/operators/__init__.py
rename to tests_common/test_utils/operators/__init__.py
diff --git a/dev/tests_common/test_utils/operators/postgres_local_executor.cfg b/tests_common/test_utils/operators/postgres_local_executor.cfg
similarity index 100%
rename from dev/tests_common/test_utils/operators/postgres_local_executor.cfg
rename to tests_common/test_utils/operators/postgres_local_executor.cfg
diff --git a/dev/tests_common/test_utils/perf/__init__.py b/tests_common/test_utils/perf/__init__.py
similarity index 100%
rename from dev/tests_common/test_utils/perf/__init__.py
rename to tests_common/test_utils/perf/__init__.py
diff --git a/dev/tests_common/test_utils/perf/perf_kit/__init__.py b/tests_common/test_utils/perf/perf_kit/__init__.py
similarity index 99%
rename from dev/tests_common/test_utils/perf/perf_kit/__init__.py
rename to tests_common/test_utils/perf/perf_kit/__init__.py
index 011276155429f..84f0f335b1b19 100644
--- a/dev/tests_common/test_utils/perf/perf_kit/__init__.py
+++ b/tests_common/test_utils/perf/perf_kit/__init__.py
@@ -15,13 +15,10 @@
 # specific language governing permissions and limitations
 # under the License.
 """
-
-Perf-kit
-========
+Perf-kit.

 Useful decorators and context managers used when testing the performance of various Airflow
 components.

-
 Content
 ========

diff --git a/dev/tests_common/test_utils/perf/perf_kit/memory.py b/tests_common/test_utils/perf/perf_kit/memory.py
similarity index 94%
rename from dev/tests_common/test_utils/perf/perf_kit/memory.py
rename to tests_common/test_utils/perf/perf_kit/memory.py
index b236630f5f6c7..195a22d0bd9ff 100644
--- a/dev/tests_common/test_utils/perf/perf_kit/memory.py
+++ b/tests_common/test_utils/perf/perf_kit/memory.py
@@ -37,7 +37,7 @@ def _human_readable_size(size, decimal_places=3):

 class TraceMemoryResult:
-    """Trace results of memory,"""
+    """Trace results of memory."""

     def __init__(self):
         self.before = 0
@@ -48,7 +48,7 @@ def __init__(self):
 @contextmanager
 def trace_memory(human_readable=True, gc_collect=False):
     """
-    Calculates the amount of difference in free memory before and after script execution.
+    Calculate the difference in free memory before and after code execution.

     In other words, how much memory the code snippet has used up.

diff --git a/dev/tests_common/test_utils/perf/perf_kit/python.py b/tests_common/test_utils/perf/perf_kit/python.py
similarity index 90%
rename from dev/tests_common/test_utils/perf/perf_kit/python.py
rename to tests_common/test_utils/perf/perf_kit/python.py
index 2589d9753451b..054fd0c0897a3 100644
--- a/dev/tests_common/test_utils/perf/perf_kit/python.py
+++ b/tests_common/test_utils/perf/perf_kit/python.py
@@ -30,8 +30,9 @@
 @contextlib.contextmanager
 def pyspy():
     """
-    This decorator provide deterministic profiling. It generate and save flame graph to file.
It uses``pyspy``
-    internally.
+    Decorate methods for deterministic profiling.
+
+    It generates and saves a flame graph to a file. It uses ``pyspy`` internally.

     Running py-spy inside of a docker container will also usually bring up a permissions denied error
     even when running as root.
@@ -65,8 +66,9 @@ def pyspy():
 @contextlib.contextmanager
 def profiled(print_callers=False):
     """
-    This decorator provide deterministic profiling. It uses ``cProfile`` internally. It generates statistic
-    and print on the screen.
+    Decorate function with deterministic profiling.
+
+    It uses ``cProfile`` internally. It generates statistics and prints them on the screen.
     """
     profile = cProfile.Profile()
     profile.enable()
@@ -86,10 +88,7 @@ def profiled(print_callers=False):
 if __name__ == "__main__":

     def case():
-        """
-        Load modules.
-        :return:
-        """
+        """Load modules."""
         import logging

         import airflow
diff --git a/dev/tests_common/test_utils/perf/perf_kit/repeat_and_time.py b/tests_common/test_utils/perf/perf_kit/repeat_and_time.py
similarity index 92%
rename from dev/tests_common/test_utils/perf/perf_kit/repeat_and_time.py
rename to tests_common/test_utils/perf/perf_kit/repeat_and_time.py
index 290cdeabc8766..ef179d8ea835f 100644
--- a/dev/tests_common/test_utils/perf/perf_kit/repeat_and_time.py
+++ b/tests_common/test_utils/perf/perf_kit/repeat_and_time.py
@@ -36,7 +36,7 @@ def __init__(self):
 @contextlib.contextmanager
 def timing(repeat_count: int = 1):
     """
-    Measures code execution time.
+    Measure code execution time.

     :param repeat_count: If passed, the result will be divided by the value.
     """
@@ -60,7 +60,7 @@ def timing(repeat_count: int = 1):

 def repeat(repeat_count=5):
     """
-    Function decorators that repeat function many times.
+    Decorate a function to repeat it many times.

     :param repeat_count: The repeat count
     """
@@ -79,13 +79,13 @@ def wrap(*args, **kwargs):

 class TimeoutException(Exception):
-    """Exception when the test timeo uts"""
+    """The exception raised when the test times out."""

 @contextlib.contextmanager
 def timeout(seconds=1):
     """
-    Executes code only limited seconds. If the code does not end during this time, it will be interrupted.
+    Execute code for only a limited number of seconds. If the code does not end during this time, it will be interrupted.

     :param seconds: Number of seconds
     """
@@ -114,7 +114,7 @@ def handle_timeout(signum, frame):
 if __name__ == "__main__":

     def monte_carlo(total=10000):
-        """Monte Carlo"""
+        """Monte Carlo."""

         inside = 0

         for _ in range(total):
@@ -140,7 +140,7 @@ def monte_carlo(total=10000):
     @repeat(REPEAT_COUNT)
     @timing()
     def get_pi():
-        """Returns PI value:"""
+        """Return PI value."""

         return monte_carlo()

     res = get_pi()
diff --git a/dev/tests_common/test_utils/perf/perf_kit/sqlalchemy.py b/tests_common/test_utils/perf/perf_kit/sqlalchemy.py
similarity index 97%
rename from dev/tests_common/test_utils/perf/perf_kit/sqlalchemy.py
rename to tests_common/test_utils/perf/perf_kit/sqlalchemy.py
index f7fe68d779371..ff17f17b1962b 100644
--- a/dev/tests_common/test_utils/perf/perf_kit/sqlalchemy.py
+++ b/tests_common/test_utils/perf/perf_kit/sqlalchemy.py
@@ -35,7 +35,7 @@ def _pretty_format_sql(text: str):

 class TraceQueries:
     """
-    Tracking SQL queries in a code block.
+    Track SQL queries in a code block.

     :param display_num: If True, displays the query number.
     :param display_time: If True, displays the query execution time.
@@ -73,7 +73,7 @@ def before_cursor_execute(
         executemany,
     ):
         """
-        Executed before cursor.
+        Run before cursor execution.
        :param conn: connection
        :param cursor: cursor
@@ -83,7 +83,6 @@ def before_cursor_execute(
         :param executemany: whether many statements executed
         :return:
         """
-
         conn.info.setdefault("query_start_time", []).append(time.monotonic())
         self.query_count += 1

@@ -97,7 +96,7 @@ def after_cursor_execute(
         executemany,
     ):
         """
-        Executed after cursor.
+        Run after cursor execution.

         :param conn: connection
         :param cursor: cursor
@@ -154,9 +153,7 @@ def __exit__(self, type_, value, traceback):

 class CountQueriesResult:
-    """
-    Counter for number of queries.
-    """
+    """Counter for number of queries."""

     def __init__(self):
         self.count = 0
@@ -198,7 +195,7 @@ def after_cursor_execute(
         executemany,
     ):
         """
-        Executed after cursor.
+        Run after cursor execution.

         :param conn: connection
         :param cursor: cursor
@@ -215,7 +212,7 @@
 if __name__ == "__main__":
     # Example:
     def case():
-        """Case of logging om/"""
+        """Case of logging."""

         import logging
         from unittest import mock
diff --git a/dev/tests_common/test_utils/permissions.py b/tests_common/test_utils/permissions.py
similarity index 93%
rename from dev/tests_common/test_utils/permissions.py
rename to tests_common/test_utils/permissions.py
index 46e964c1f6190..3abb5d0fbcdab 100644
--- a/dev/tests_common/test_utils/permissions.py
+++ b/tests_common/test_utils/permissions.py
@@ -21,6 +21,8 @@

 def _resource_name(dag_id: str, resource_name: str) -> str:
     """
+    Return resource name for given dag_id and resource_name in a backwards-compatible way.
+
     This method is to keep compatibility with new FAB versions
     running with old airflow versions.
     """
diff --git a/dev/tests_common/test_utils/providers.py b/tests_common/test_utils/providers.py
similarity index 94%
rename from dev/tests_common/test_utils/providers.py
rename to tests_common/test_utils/providers.py
index 02ca9bed41989..34288d8680d29 100644
--- a/dev/tests_common/test_utils/providers.py
+++ b/tests_common/test_utils/providers.py
@@ -21,7 +21,7 @@

 def object_exists(path: str):
-    """Returns true if importable python object is there."""
+    """Return true if the importable Python object exists."""
     from airflow.utils.module_loading import import_string

     try:
@@ -33,7 +33,7 @@ def object_exists(path: str):

 def get_provider_version(provider_name):
     """
-    Returns provider version given provider package name.
+    Return provider version given provider package name.
    Example::

        if provider_version("apache-airflow-providers-cncf-kubernetes") >= (6, 0):
diff --git a/dev/tests_common/test_utils/remote_user_api_auth_backend.py b/tests_common/test_utils/remote_user_api_auth_backend.py
similarity index 97%
rename from dev/tests_common/test_utils/remote_user_api_auth_backend.py
rename to tests_common/test_utils/remote_user_api_auth_backend.py
index 59df201e530e4..74f705bcf709a 100644
--- a/dev/tests_common/test_utils/remote_user_api_auth_backend.py
+++ b/tests_common/test_utils/remote_user_api_auth_backend.py
@@ -46,7 +46,7 @@ def _lookup_user(username: str):

 def requires_authentication(function: T):
-    """Decorator for functions that require authentication"""
+    """Decorate functions that require authentication."""

     @wraps(function)
     def decorated(*args, **kwargs):
diff --git a/dev/tests_common/test_utils/reset_warning_registry.py b/tests_common/test_utils/reset_warning_registry.py
similarity index 96%
rename from dev/tests_common/test_utils/reset_warning_registry.py
rename to tests_common/test_utils/reset_warning_registry.py
index d7d5c1ffe577b..4c6ae8d26b1a2 100644
--- a/dev/tests_common/test_utils/reset_warning_registry.py
+++ b/tests_common/test_utils/reset_warning_registry.py
@@ -34,7 +34,9 @@
 # https://stackoverflow.com/questions/19428761/python-showing-once-warnings-again-resetting-all-warning-registries
 class reset_warning_registry:
     """
-    context manager which archives & clears warning registry for duration of
+    Reset warning registry context manager.
+
+    The context manager archives & clears the warning registry for the duration of the
     context.

     :param pattern:
diff --git a/dev/tests_common/test_utils/salesforce_system_helpers.py b/tests_common/test_utils/salesforce_system_helpers.py
similarity index 100%
rename from dev/tests_common/test_utils/salesforce_system_helpers.py
rename to tests_common/test_utils/salesforce_system_helpers.py
diff --git a/dev/tests_common/test_utils/sftp_system_helpers.py b/tests_common/test_utils/sftp_system_helpers.py
similarity index 99%
rename from dev/tests_common/test_utils/sftp_system_helpers.py
rename to tests_common/test_utils/sftp_system_helpers.py
index baed591b9004b..a599f120cb51b 100644
--- a/dev/tests_common/test_utils/sftp_system_helpers.py
+++ b/tests_common/test_utils/sftp_system_helpers.py
@@ -30,7 +30,7 @@
 @contextmanager
 def provide_sftp_default_connection(key_file_path: str):
     """
-    Context manager to provide a temporary value for sftp_default connection
+    Context manager to provide a temporary value for sftp_default connection.

     :param key_file_path: Path to file with sftp_default credentials .json file.
""" diff --git a/dev/tests_common/test_utils/system_tests.py b/tests_common/test_utils/system_tests.py similarity index 100% rename from dev/tests_common/test_utils/system_tests.py rename to tests_common/test_utils/system_tests.py diff --git a/dev/tests_common/test_utils/system_tests_class.py b/tests_common/test_utils/system_tests_class.py similarity index 95% rename from dev/tests_common/test_utils/system_tests_class.py rename to tests_common/test_utils/system_tests_class.py index 5abdca96bee06..61f0b71d4c781 100644 --- a/dev/tests_common/test_utils/system_tests_class.py +++ b/tests_common/test_utils/system_tests_class.py @@ -28,9 +28,8 @@ from airflow.configuration import AIRFLOW_HOME, AirflowConfigParser, get_airflow_config from airflow.exceptions import AirflowException - -from dev.tests_common.test_utils import AIRFLOW_MAIN_FOLDER -from dev.tests_common.test_utils.logging_command_executor import get_executor +from tests_common.test_utils import AIRFLOW_MAIN_FOLDER +from tests_common.test_utils.logging_command_executor import get_executor DEFAULT_DAG_FOLDER = os.path.join(AIRFLOW_MAIN_FOLDER, "airflow", "example_dags") @@ -42,9 +41,7 @@ def get_default_logs_if_none(logs: str | None) -> str: def resolve_logs_folder() -> str: - """ - Returns LOGS folder specified in current Airflow config. - """ + """Return LOGS folder specified in current Airflow config.""" config_file = get_airflow_config(AIRFLOW_HOME) conf = AirflowConfigParser() conf.read(config_file) @@ -59,6 +56,8 @@ def resolve_logs_folder() -> str: class SystemTest: + """Base class for system tests.""" + log: logging.Logger @staticmethod @@ -70,6 +69,8 @@ def setup_logger(request): @pytest.fixture(autouse=True) def setup_system(self): """ + Set up system tests. + We want to avoid random errors while database got reset - those Are apparently triggered by parser trying to parse DAGs while The tables are dropped. We move the dags temporarily out of the dags folder diff --git a/dev/tests_common/test_utils/terraform.py b/tests_common/test_utils/terraform.py similarity index 93% rename from dev/tests_common/test_utils/terraform.py rename to tests_common/test_utils/terraform.py index b600ef6643b19..387d8cb28cafc 100644 --- a/dev/tests_common/test_utils/terraform.py +++ b/tests_common/test_utils/terraform.py @@ -16,10 +16,12 @@ # under the License. 
from __future__ import annotations -from dev.tests_common.test_utils.system_tests_class import SystemTest +from tests_common.test_utils.system_tests_class import SystemTest class Terraform(SystemTest): + """Base class for Terraform tests.""" + TERRAFORM_DIR: str def setup_method(self) -> None: diff --git a/dev/tests_common/test_utils/timetables.py b/tests_common/test_utils/timetables.py similarity index 97% rename from dev/tests_common/test_utils/timetables.py rename to tests_common/test_utils/timetables.py index 849968e76110a..f2053d83b24ac 100644 --- a/dev/tests_common/test_utils/timetables.py +++ b/tests_common/test_utils/timetables.py @@ -30,6 +30,8 @@ def delta_timetable(delta) -> DeltaDataIntervalTimetable: class CustomSerializationTimetable(Timetable): + """Custom timetable for testing serialization.""" + def __init__(self, value: str): self.value = value diff --git a/dev/tests_common/test_utils/watcher.py b/tests_common/test_utils/watcher.py similarity index 86% rename from dev/tests_common/test_utils/watcher.py rename to tests_common/test_utils/watcher.py index 4ee2e2e665006..6c6402cc6bd38 100644 --- a/dev/tests_common/test_utils/watcher.py +++ b/tests_common/test_utils/watcher.py @@ -23,6 +23,10 @@ @task(trigger_rule=TriggerRule.ONE_FAILED, retries=0) def watcher(): - """Watcher task raises an AirflowException and is used to 'watch' tasks for failures - and propagates fail status to the whole DAG Run""" + """ + Watcher task. + + Watcher task raises an AirflowException and is used to 'watch' tasks for failures + and propagates fail status to the whole DAG Run. + """ raise AirflowException("Failing task because one or more upstream tasks failed.") diff --git a/dev/tests_common/test_utils/www.py b/tests_common/test_utils/www.py similarity index 100% rename from dev/tests_common/test_utils/www.py rename to tests_common/test_utils/www.py From cc0aad0bab2d7ed55d8128f8e75ae9dbf2789c28 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Tue, 15 Oct 2024 11:29:40 +0800 Subject: [PATCH 120/125] Add AssetActive model (#42612) --- airflow/api_fastapi/views/ui/assets.py | 2 +- airflow/dag_processing/collection.py | 20 +- airflow/jobs/scheduler_job_runner.py | 21 +- ...5_3_0_0_add_name_field_to_dataset_model.py | 4 +- .../versions/0037_3_0_0_add_asset_active.py | 88 ++ airflow/models/asset.py | 59 +- airflow/models/dag.py | 1 + airflow/serialization/pydantic/asset.py | 1 - airflow/utils/db.py | 2 +- airflow/www/views.py | 4 +- docs/apache-airflow/img/airflow_erd.sha256 | 2 +- docs/apache-airflow/img/airflow_erd.svg | 1253 +++++++++-------- docs/apache-airflow/migrations-ref.rst | 4 +- tests/jobs/test_scheduler_job.py | 21 +- tests/models/test_dag.py | 12 +- tests/utils/test_db_cleanup.py | 1 + tests/www/views/test_views_dataset.py | 116 +- 17 files changed, 878 insertions(+), 733 deletions(-) create mode 100644 airflow/migrations/versions/0037_3_0_0_add_asset_active.py diff --git a/airflow/api_fastapi/views/ui/assets.py b/airflow/api_fastapi/views/ui/assets.py index 4a4ad1d0df9b4..1a198745f11f3 100644 --- a/airflow/api_fastapi/views/ui/assets.py +++ b/airflow/api_fastapi/views/ui/assets.py @@ -77,7 +77,7 @@ async def next_run_assets( ), isouter=True, ) - .where(DagScheduleAssetReference.dag_id == dag_id, ~AssetModel.is_orphaned) + .where(DagScheduleAssetReference.dag_id == dag_id, AssetModel.active.has()) .group_by(AssetModel.id, AssetModel.uri) .order_by(AssetModel.uri) ) diff --git a/airflow/dag_processing/collection.py b/airflow/dag_processing/collection.py index 
c8ce5dc873afa..f68ffbf331dd2 100644 --- a/airflow/dag_processing/collection.py +++ b/airflow/dag_processing/collection.py @@ -31,13 +31,13 @@ import logging from typing import TYPE_CHECKING, NamedTuple -from sqlalchemy import func, select +from sqlalchemy import func, select, tuple_ from sqlalchemy.orm import joinedload, load_only -from sqlalchemy.sql import expression from airflow.assets import Asset, AssetAlias from airflow.assets.manager import asset_manager from airflow.models.asset import ( + AssetActive, AssetAliasModel, AssetModel, DagScheduleAssetAliasReference, @@ -298,8 +298,6 @@ def add_assets(self, *, session: Session) -> dict[str, AssetModel]: orm_assets: dict[str, AssetModel] = { am.uri: am for am in session.scalars(select(AssetModel).where(AssetModel.uri.in_(self.assets))) } - for model in orm_assets.values(): - model.is_orphaned = expression.false() orm_assets.update( (model.uri, model) for model in asset_manager.create_assets( @@ -328,6 +326,20 @@ def add_asset_aliases(self, *, session: Session) -> dict[str, AssetAliasModel]: ) return orm_aliases + def add_asset_active_references(self, assets: Collection[AssetModel], *, session: Session) -> None: + existing_entries = set( + session.execute( + select(AssetActive.name, AssetActive.uri).where( + tuple_(AssetActive.name, AssetActive.uri).in_((asset.name, asset.uri) for asset in assets) + ) + ) + ) + session.add_all( + AssetActive.for_asset(asset) + for asset in assets + if (asset.name, asset.uri) not in existing_entries + ) + def add_dag_asset_references( self, dags: dict[str, DagModel], diff --git a/airflow/jobs/scheduler_job_runner.py b/airflow/jobs/scheduler_job_runner.py index f6821f57aa83c..30f58885a9aa2 100644 --- a/airflow/jobs/scheduler_job_runner.py +++ b/airflow/jobs/scheduler_job_runner.py @@ -45,6 +45,7 @@ from airflow.jobs.job import Job, perform_heartbeat from airflow.models import Log from airflow.models.asset import ( + AssetActive, AssetDagRunQueue, AssetEvent, AssetModel, @@ -2062,15 +2063,14 @@ def _cleanup_stale_dags(self, session: Session = NEW_SESSION) -> None: SerializedDagModel.remove_dag(dag_id=dag.dag_id, session=session) session.flush() - def _set_orphaned(self, asset: AssetModel) -> int: - self.log.info("Orphaning unreferenced asset '%s'", asset.uri) - asset.is_orphaned = expression.true() - return 1 + def _get_orphaning_identifier(self, asset: AssetModel) -> tuple[str, str]: + self.log.info("Orphaning unreferenced %s", asset) + return asset.name, asset.uri @provide_session def _orphan_unreferenced_assets(self, session: Session = NEW_SESSION) -> None: """ - Detect orphaned assets and set is_orphaned flag to True. + Detect orphaned assets and remove their active entry. An orphaned asset is no longer referenced in any DAG schedule parameters or task outlets. 
""" @@ -2085,7 +2085,7 @@ def _orphan_unreferenced_assets(self, session: Session = NEW_SESSION) -> None: isouter=True, ) .group_by(AssetModel.id) - .where(~AssetModel.is_orphaned) + .where(AssetModel.active.has()) .having( and_( func.count(DagScheduleAssetReference.dag_id) == 0, @@ -2094,8 +2094,13 @@ def _orphan_unreferenced_assets(self, session: Session = NEW_SESSION) -> None: ) ) - updated_count = sum(self._set_orphaned(asset) for asset in orphaned_asset_query) - Stats.gauge("asset.orphaned", updated_count) + orphaning_identifiers = [self._get_orphaning_identifier(asset) for asset in orphaned_asset_query] + session.execute( + delete(AssetActive).where( + tuple_in_condition((AssetActive.name, AssetActive.uri), orphaning_identifiers) + ) + ) + Stats.gauge("asset.orphaned", len(orphaning_identifiers)) def _executor_to_tis(self, tis: list[TaskInstance]) -> dict[BaseExecutor, list[TaskInstance]]: """Organize TIs into lists per their respective executor.""" diff --git a/airflow/migrations/versions/0035_3_0_0_add_name_field_to_dataset_model.py b/airflow/migrations/versions/0035_3_0_0_add_name_field_to_dataset_model.py index 6016dd9658908..2460b6956cff6 100644 --- a/airflow/migrations/versions/0035_3_0_0_add_name_field_to_dataset_model.py +++ b/airflow/migrations/versions/0035_3_0_0_add_name_field_to_dataset_model.py @@ -63,7 +63,9 @@ def upgrade(): batch_op.add_column(sa.Column("name", _STRING_COLUMN_TYPE)) batch_op.add_column(sa.Column("group", _STRING_COLUMN_TYPE, default=str, nullable=False)) # Fill name from uri column. - Session(bind=op.get_bind()).execute(sa.text("update dataset set name=uri")) + with Session(bind=op.get_bind()) as session: + session.execute(sa.text("update dataset set name=uri")) + session.commit() # Set the name column non-nullable. # Now with values in there, we can create the new unique constraint and index. # Due to MySQL restrictions, we are also reducing the length on uri. diff --git a/airflow/migrations/versions/0037_3_0_0_add_asset_active.py b/airflow/migrations/versions/0037_3_0_0_add_asset_active.py new file mode 100644 index 0000000000000..422bc440dba85 --- /dev/null +++ b/airflow/migrations/versions/0037_3_0_0_add_asset_active.py @@ -0,0 +1,88 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Add AssetActive to track orphaning instead of a flag. + +Revision ID: 5a5d66100783 +Revises: c3389cd7793f +Create Date: 2024-10-01 08:39:48.997198 + +""" + +from __future__ import annotations + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.orm import Session + +# revision identifiers, used by Alembic. 
+revision = "5a5d66100783"
+down_revision = "c3389cd7793f"
+branch_labels = None
+depends_on = None
+airflow_version = "3.0.0"
+
+_STRING_COLUMN_TYPE = sa.String(length=1500).with_variant(
+    sa.String(length=1500, collation="latin1_general_cs"),
+    dialect_name="mysql",
+)
+
+
+def upgrade():
+    op.create_table(
+        "asset_active",
+        sa.Column("name", _STRING_COLUMN_TYPE, nullable=False),
+        sa.Column("uri", _STRING_COLUMN_TYPE, nullable=False),
+        sa.PrimaryKeyConstraint("name", "uri", name="asset_active_pkey"),
+        sa.ForeignKeyConstraint(
+            columns=["name", "uri"],
+            refcolumns=["dataset.name", "dataset.uri"],
+            name="asset_active_asset_name_uri_fkey",
+            ondelete="CASCADE",
+        ),
+        sa.Index("idx_asset_active_name_unique", "name", unique=True),
+        sa.Index("idx_asset_active_uri_unique", "uri", unique=True),
+    )
+    with Session(bind=op.get_bind()) as session:
+        session.execute(
+            sa.text(
+                "insert into asset_active (name, uri) "
+                "select name, uri from dataset where is_orphaned = false"
+            )
+        )
+        session.commit()
+    with op.batch_alter_table("dataset", schema=None) as batch_op:
+        batch_op.drop_column("is_orphaned")
+
+
+def downgrade():
+    with op.batch_alter_table("dataset", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column("is_orphaned", sa.Boolean, default=False, nullable=False, server_default="0")
+        )
+    with Session(bind=op.get_bind()) as session:
+        session.execute(
+            sa.text(
+                "update dataset set is_orphaned = true "
+                "where not exists (select 1 from asset_active "
+                "where dataset.name = asset_active.name and dataset.uri = asset_active.uri)"
+            )
+        )
+        session.commit()
+    op.drop_table("asset_active")
diff --git a/airflow/models/asset.py b/airflow/models/asset.py
index fb56bc4bf1ecf..d5ca0ea513f12 100644
--- a/airflow/models/asset.py
+++ b/airflow/models/asset.py
@@ -21,7 +21,6 @@

 import sqlalchemy_jsonfield
 from sqlalchemy import (
-    Boolean,
     Column,
     ForeignKey,
     ForeignKeyConstraint,
@@ -192,7 +191,8 @@ class AssetModel(Base):

     created_at = Column(UtcDateTime, default=timezone.utcnow, nullable=False)
     updated_at = Column(UtcDateTime, default=timezone.utcnow, onupdate=timezone.utcnow, nullable=False)
-    is_orphaned = Column(Boolean, default=False, nullable=False, server_default="0")
+
+    active = relationship("AssetActive", uselist=False, viewonly=True)

     consuming_dags = relationship("DagScheduleAssetReference", back_populates="dataset")
     producing_tasks = relationship("TaskOutletAssetReference", back_populates="dataset")
@@ -232,6 +232,61 @@ def to_public(self) -> Asset:
         return Asset(uri=self.uri, extra=self.extra)

+class AssetActive(Base):
+    """
+    Collection of active assets.
+
+    An asset is considered active if it is declared by the user in any DAG files.
+    AssetModel entries that are not active (also called orphaned in some parts
+    of the code base) are still kept in the database, but have their corresponding
+    entries in this table removed. This ensures we keep all possible history on
+    distinct assets (those with non-matching name-URI pairs), but still ensure
+    *name and URI are each unique* within active assets.
+ """ + + name = Column( + String(length=1500).with_variant( + String( + length=1500, + # latin1 allows for more indexed length in mysql + # and this field should only be ascii chars + collation="latin1_general_cs", + ), + "mysql", + ), + nullable=False, + ) + uri = Column( + String(length=1500).with_variant( + String( + length=1500, + # latin1 allows for more indexed length in mysql + # and this field should only be ascii chars + collation="latin1_general_cs", + ), + "mysql", + ), + nullable=False, + ) + + __tablename__ = "asset_active" + __table_args__ = ( + PrimaryKeyConstraint(name, uri, name="asset_active_pkey"), + ForeignKeyConstraint( + columns=[name, uri], + refcolumns=["dataset.name", "dataset.uri"], + name="asset_active_asset_name_uri_fkey", + ondelete="CASCADE", + ), + Index("idx_asset_active_name_unique", name, unique=True), + Index("idx_asset_active_uri_unique", uri, unique=True), + ) + + @classmethod + def for_asset(cls, asset: AssetModel) -> AssetActive: + return cls(name=asset.name, uri=asset.uri) + + class DagScheduleAssetAliasReference(Base): """References from a DAG to an asset alias of which it is a consumer.""" diff --git a/airflow/models/dag.py b/airflow/models/dag.py index 1dfb3b2e9114d..f8d9f55e56e18 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -2593,6 +2593,7 @@ def bulk_write_to_db( orm_asset_aliases = asset_op.add_asset_aliases(session=session) session.flush() # This populates id so we can create fks in later calls. + asset_op.add_asset_active_references(orm_assets.values(), session=session) asset_op.add_dag_asset_references(orm_dags, orm_assets, session=session) asset_op.add_dag_asset_alias_references(orm_dags, orm_asset_aliases, session=session) asset_op.add_task_asset_references(orm_dags, orm_assets, session=session) diff --git a/airflow/serialization/pydantic/asset.py b/airflow/serialization/pydantic/asset.py index 29806d3bdf911..4cd264902091a 100644 --- a/airflow/serialization/pydantic/asset.py +++ b/airflow/serialization/pydantic/asset.py @@ -51,7 +51,6 @@ class AssetPydantic(BaseModelPydantic): extra: Optional[dict] created_at: datetime updated_at: datetime - is_orphaned: bool consuming_dags: List[DagScheduleAssetReferencePydantic] producing_tasks: List[TaskOutletAssetReferencePydantic] diff --git a/airflow/utils/db.py b/airflow/utils/db.py index fde641fa9b424..26af566c7e2bd 100644 --- a/airflow/utils/db.py +++ b/airflow/utils/db.py @@ -96,7 +96,7 @@ class MappedClassProtocol(Protocol): "2.9.0": "1949afb29106", "2.9.2": "686269002441", "2.10.0": "22ed7efa9da2", - "3.0.0": "c3389cd7793f", + "3.0.0": "5a5d66100783", } diff --git a/airflow/www/views.py b/airflow/www/views.py index 8dba4c4fcc0d8..23ccae6224960 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -3450,7 +3450,7 @@ def next_run_datasets(self, dag_id): ), isouter=True, ) - .where(DagScheduleAssetReference.dag_id == dag_id, ~AssetModel.is_orphaned) + .where(DagScheduleAssetReference.dag_id == dag_id, AssetModel.active.has()) .group_by(AssetModel.id, AssetModel.uri) .order_by(AssetModel.uri) ) @@ -3583,7 +3583,7 @@ def datasets_summary(self): if has_event_filters: count_query = count_query.join(AssetEvent, AssetEvent.dataset_id == AssetModel.id) - filters = [~AssetModel.is_orphaned] + filters = [AssetModel.active.has()] if uri_pattern: filters.append(AssetModel.uri.ilike(f"%{uri_pattern}%")) if updated_after: diff --git a/docs/apache-airflow/img/airflow_erd.sha256 b/docs/apache-airflow/img/airflow_erd.sha256 index 67d7cad45e23e..a751eca916568 100644 --- 
a/docs/apache-airflow/img/airflow_erd.sha256 +++ b/docs/apache-airflow/img/airflow_erd.sha256 @@ -1 +1 @@ -8bd129828ba299ef05d70305eee66d15b6c0c79dc6ae82f654b9657464e3682a \ No newline at end of file +0ed26236c783f7524416c1377638fe18ff3520bd355160db48656585ff58524e \ No newline at end of file diff --git a/docs/apache-airflow/img/airflow_erd.svg b/docs/apache-airflow/img/airflow_erd.svg index 8f180c819fe32..73452a069c040 100644 --- a/docs/apache-airflow/img/airflow_erd.svg +++ b/docs/apache-airflow/img/airflow_erd.svg @@ -4,11 +4,11 @@ - - + + %3 - + log @@ -433,48 +433,48 @@ dataset_alias - -dataset_alias - -id - - [INTEGER] - NOT NULL - -name - - [VARCHAR(3000)] - NOT NULL + +dataset_alias + +id + + [INTEGER] + NOT NULL + +name + + [VARCHAR(1500)] + NOT NULL dataset_alias_dataset - -dataset_alias_dataset - -alias_id - - [INTEGER] - NOT NULL - -dataset_id - - [INTEGER] - NOT NULL + +dataset_alias_dataset + +alias_id + + [INTEGER] + NOT NULL + +dataset_id + + [INTEGER] + NOT NULL dataset_alias--dataset_alias_dataset - -0..N -1 + +0..N +1 dataset_alias--dataset_alias_dataset - -0..N -1 + +0..N +1 @@ -495,16 +495,16 @@ dataset_alias--dataset_alias_dataset_event - -0..N -1 + +0..N +1 dataset_alias--dataset_alias_dataset_event - -0..N -1 + +0..N +1 @@ -535,72 +535,97 @@ dataset_alias--dag_schedule_dataset_alias_reference - -0..N -1 + +0..N +1 dataset - -dataset - -id - - [INTEGER] - NOT NULL - -created_at - - [TIMESTAMP] - NOT NULL - -extra - - [JSON] - NOT NULL - -group - - [VARCHAR(1500)] - NOT NULL - -is_orphaned - - [BOOLEAN] - NOT NULL - -name - - [VARCHAR(1500)] - NOT NULL - -updated_at - - [TIMESTAMP] - NOT NULL - -uri - - [VARCHAR(1500)] - NOT NULL + +dataset + +id + + [INTEGER] + NOT NULL + +created_at + + [TIMESTAMP] + NOT NULL + +extra + + [JSON] + NOT NULL + +group + + [VARCHAR(1500)] + NOT NULL + +name + + [VARCHAR(1500)] + NOT NULL + +updated_at + + [TIMESTAMP] + NOT NULL + +uri + + [VARCHAR(1500)] + NOT NULL dataset--dataset_alias_dataset - -0..N -1 + +0..N +1 dataset--dataset_alias_dataset - -0..N -1 + +0..N +1 - + +asset_active + +asset_active + +name + + [VARCHAR(1500)] + NOT NULL + +uri + + [VARCHAR(1500)] + NOT NULL + + + +dataset--asset_active + +1 +1 + + + +dataset--asset_active + +1 +1 + + + dag_schedule_dataset_reference dag_schedule_dataset_reference @@ -626,14 +651,14 @@ NOT NULL - + dataset--dag_schedule_dataset_reference - -0..N -1 + +0..N +1 - + task_outlet_dataset_reference task_outlet_dataset_reference @@ -664,14 +689,14 @@ NOT NULL - + dataset--task_outlet_dataset_reference - -0..N -1 + +0..N +1 - + dataset_dag_run_queue dataset_dag_run_queue @@ -692,70 +717,70 @@ NOT NULL - + dataset--dataset_dag_run_queue - -0..N -1 + +0..N +1 - + dataset_event - -dataset_event - -id - - [INTEGER] - NOT NULL - -dataset_id - - [INTEGER] - NOT NULL - -extra - - [JSON] - NOT NULL - -source_dag_id - - [VARCHAR(250)] - -source_map_index - - [INTEGER] - -source_run_id - - [VARCHAR(250)] - -source_task_id - - [VARCHAR(250)] - -timestamp - - [TIMESTAMP] - NOT NULL + +dataset_event + +id + + [INTEGER] + NOT NULL + +dataset_id + + [INTEGER] + NOT NULL + +extra + + [JSON] + NOT NULL + +source_dag_id + + [VARCHAR(250)] + +source_map_index + + [INTEGER] + +source_run_id + + [VARCHAR(250)] + +source_task_id + + [VARCHAR(250)] + +timestamp + + [TIMESTAMP] + NOT NULL - + dataset_event--dataset_alias_dataset_event - -0..N -1 + +0..N +1 - + dataset_event--dataset_alias_dataset_event - -0..N -1 + +0..N +1 - + dagrun_dataset_event dagrun_dataset_event @@ -771,156 +796,156 @@ NOT NULL - + 
dataset_event--dagrun_dataset_event - -0..N -1 + +0..N +1 - + dag - -dag - -dag_id - - [VARCHAR(250)] - NOT NULL - -dag_display_name - - [VARCHAR(2000)] - -dataset_expression - - [JSON] - -default_view - - [VARCHAR(25)] - -description - - [TEXT] - -fileloc - - [VARCHAR(2000)] - -has_import_errors - - [BOOLEAN] - -has_task_concurrency_limits - - [BOOLEAN] - NOT NULL - -is_active - - [BOOLEAN] - -is_paused - - [BOOLEAN] - -last_expired - - [TIMESTAMP] - -last_parsed_time - - [TIMESTAMP] - -last_pickled - - [TIMESTAMP] - -max_active_runs - - [INTEGER] - -max_active_tasks - - [INTEGER] - NOT NULL - -max_consecutive_failed_dag_runs - - [INTEGER] - NOT NULL - -next_dagrun - - [TIMESTAMP] - -next_dagrun_create_after - - [TIMESTAMP] - -next_dagrun_data_interval_end - - [TIMESTAMP] - -next_dagrun_data_interval_start - - [TIMESTAMP] - -owners - - [VARCHAR(2000)] - -pickle_id - - [INTEGER] - -processor_subdir - - [VARCHAR(2000)] - -scheduler_lock - - [BOOLEAN] - -timetable_description - - [VARCHAR(1000)] - -timetable_summary - - [TEXT] + +dag + +dag_id + + [VARCHAR(250)] + NOT NULL + +dag_display_name + + [VARCHAR(2000)] + +dataset_expression + + [JSON] + +default_view + + [VARCHAR(25)] + +description + + [TEXT] + +fileloc + + [VARCHAR(2000)] + +has_import_errors + + [BOOLEAN] + +has_task_concurrency_limits + + [BOOLEAN] + NOT NULL + +is_active + + [BOOLEAN] + +is_paused + + [BOOLEAN] + +last_expired + + [TIMESTAMP] + +last_parsed_time + + [TIMESTAMP] + +last_pickled + + [TIMESTAMP] + +max_active_runs + + [INTEGER] + +max_active_tasks + + [INTEGER] + NOT NULL + +max_consecutive_failed_dag_runs + + [INTEGER] + NOT NULL + +next_dagrun + + [TIMESTAMP] + +next_dagrun_create_after + + [TIMESTAMP] + +next_dagrun_data_interval_end + + [TIMESTAMP] + +next_dagrun_data_interval_start + + [TIMESTAMP] + +owners + + [VARCHAR(2000)] + +pickle_id + + [INTEGER] + +processor_subdir + + [VARCHAR(2000)] + +scheduler_lock + + [BOOLEAN] + +timetable_description + + [VARCHAR(1000)] + +timetable_summary + + [TEXT] - + dag--dag_schedule_dataset_alias_reference - -0..N -1 + +0..N +1 - + dag--dag_schedule_dataset_reference - -0..N -1 + +0..N +1 - + dag--task_outlet_dataset_reference - -0..N -1 + +0..N +1 - + dag--dataset_dag_run_queue - -0..N -1 + +0..N +1 - + dag_tag dag_tag @@ -936,14 +961,14 @@ NOT NULL - + dag--dag_tag - -0..N -1 + +0..N +1 - + dag_owner_attributes dag_owner_attributes @@ -964,14 +989,14 @@ NOT NULL - + dag--dag_owner_attributes - -0..N -1 + +0..N +1 - + dag_warning dag_warning @@ -997,14 +1022,14 @@ NOT NULL - + dag--dag_warning - -0..N -1 + +0..N +1 - + log_template log_template @@ -1030,7 +1055,7 @@ NOT NULL - + dag_run dag_run @@ -1126,21 +1151,21 @@ [TIMESTAMP] - + log_template--dag_run 0..N {0,1} - + dag_run--dagrun_dataset_event 0..N 1 - + task_instance task_instance @@ -1280,21 +1305,21 @@ [TIMESTAMP] - + dag_run--task_instance 0..N 1 - + dag_run--task_instance 0..N 1 - + dag_run_note dag_run_note @@ -1323,14 +1348,14 @@ [VARCHAR(128)] - + dag_run--dag_run_note 1 1 - + backfill_dag_run backfill_dag_run @@ -1355,14 +1380,14 @@ NOT NULL - + dag_run--backfill_dag_run 0..N {0,1} - + task_reschedule task_reschedule @@ -1418,49 +1443,49 @@ NOT NULL - + dag_run--task_reschedule 0..N 1 - + dag_run--task_reschedule 0..N 1 - + task_instance--task_reschedule 0..N 1 - + task_instance--task_reschedule -0..N +0..N 1 - + task_instance--task_reschedule -0..N +0..N 1 - + task_instance--task_reschedule 0..N 1 - + rendered_task_instance_fields rendered_task_instance_fields @@ -1495,35 +1520,35 @@ NOT NULL - + 
task_instance--rendered_task_instance_fields 0..N 1 - + task_instance--rendered_task_instance_fields 0..N 1 - + task_instance--rendered_task_instance_fields 0..N 1 - + task_instance--rendered_task_instance_fields 0..N 1 - + task_fail task_fail @@ -1566,35 +1591,35 @@ NOT NULL - + task_instance--task_fail 0..N 1 - + task_instance--task_fail 0..N 1 - + task_instance--task_fail 0..N 1 - + task_instance--task_fail 0..N 1 - + task_map task_map @@ -1629,35 +1654,35 @@ NOT NULL - + task_instance--task_map 0..N 1 - + task_instance--task_map 0..N 1 - + task_instance--task_map 0..N 1 - + task_instance--task_map 0..N 1 - + xcom xcom @@ -1702,35 +1727,35 @@ [BYTEA] - + task_instance--xcom 0..N 1 - + task_instance--xcom 0..N 1 - + task_instance--xcom 0..N 1 - + task_instance--xcom 0..N 1 - + task_instance_note task_instance_note @@ -1774,35 +1799,35 @@ [VARCHAR(128)] - + task_instance--task_instance_note 0..N 1 - + task_instance--task_instance_note 0..N 1 - + task_instance--task_instance_note 0..N 1 - + task_instance--task_instance_note 0..N 1 - + task_instance_history task_instance_history @@ -1948,35 +1973,35 @@ [TIMESTAMP] - + task_instance--task_instance_history 0..N 1 - + task_instance--task_instance_history 0..N 1 - + task_instance--task_instance_history 0..N 1 - + task_instance--task_instance_history 0..N 1 - + backfill backfill @@ -2029,21 +2054,21 @@ NOT NULL - + backfill--dag_run 0..N {0,1} - + backfill--backfill_dag_run 0..N 1 - + trigger trigger @@ -2073,327 +2098,327 @@ [INTEGER] - + trigger--task_instance 0..N {0,1} - + session - -session - -id - - [INTEGER] - NOT NULL - -data - - [BYTEA] - -expiry - - [TIMESTAMP] - -session_id - - [VARCHAR(255)] + +session + +id + + [INTEGER] + NOT NULL + +data + + [BYTEA] + +expiry + + [TIMESTAMP] + +session_id + + [VARCHAR(255)] - + alembic_version - -alembic_version - -version_num - - [VARCHAR(32)] - NOT NULL + +alembic_version + +version_num + + [VARCHAR(32)] + NOT NULL - + ab_user - -ab_user - -id - - [INTEGER] - NOT NULL - -active - - [BOOLEAN] - -changed_by_fk - - [INTEGER] - -changed_on - - [TIMESTAMP] - -created_by_fk - - [INTEGER] - -created_on - - [TIMESTAMP] - -email - - [VARCHAR(512)] - NOT NULL - -fail_login_count - - [INTEGER] - -first_name - - [VARCHAR(256)] - NOT NULL - -last_login - - [TIMESTAMP] - -last_name - - [VARCHAR(256)] - NOT NULL - -login_count - - [INTEGER] - -password - - [VARCHAR(256)] - -username - - [VARCHAR(512)] - NOT NULL + +ab_user + +id + + [INTEGER] + NOT NULL + +active + + [BOOLEAN] + +changed_by_fk + + [INTEGER] + +changed_on + + [TIMESTAMP] + +created_by_fk + + [INTEGER] + +created_on + + [TIMESTAMP] + +email + + [VARCHAR(512)] + NOT NULL + +fail_login_count + + [INTEGER] + +first_name + + [VARCHAR(256)] + NOT NULL + +last_login + + [TIMESTAMP] + +last_name + + [VARCHAR(256)] + NOT NULL + +login_count + + [INTEGER] + +password + + [VARCHAR(256)] + +username + + [VARCHAR(512)] + NOT NULL - + ab_user--ab_user - -0..N -{0,1} + +0..N +{0,1} - + ab_user--ab_user - -0..N -{0,1} + +0..N +{0,1} - + ab_user_role - -ab_user_role - -id - - [INTEGER] - NOT NULL - -role_id - - [INTEGER] - -user_id - - [INTEGER] + +ab_user_role + +id + + [INTEGER] + NOT NULL + +role_id + + [INTEGER] + +user_id + + [INTEGER] - + ab_user--ab_user_role - -0..N -{0,1} + +0..N +{0,1} - + ab_register_user - -ab_register_user - -id - - [INTEGER] - NOT NULL - -email - - [VARCHAR(512)] - NOT NULL - -first_name - - [VARCHAR(256)] - NOT NULL - -last_name - - [VARCHAR(256)] - NOT NULL - -password - - [VARCHAR(256)] - -registration_date - - [TIMESTAMP] - 
-registration_hash - - [VARCHAR(256)] - -username - - [VARCHAR(512)] - NOT NULL + +ab_register_user + +id + + [INTEGER] + NOT NULL + +email + + [VARCHAR(512)] + NOT NULL + +first_name + + [VARCHAR(256)] + NOT NULL + +last_name + + [VARCHAR(256)] + NOT NULL + +password + + [VARCHAR(256)] + +registration_date + + [TIMESTAMP] + +registration_hash + + [VARCHAR(256)] + +username + + [VARCHAR(512)] + NOT NULL - + ab_permission - -ab_permission - -id - - [INTEGER] - NOT NULL - -name - - [VARCHAR(100)] - NOT NULL + +ab_permission + +id + + [INTEGER] + NOT NULL + +name + + [VARCHAR(100)] + NOT NULL - + ab_permission_view - -ab_permission_view - -id - - [INTEGER] - NOT NULL - -permission_id - - [INTEGER] - -view_menu_id - - [INTEGER] + +ab_permission_view + +id + + [INTEGER] + NOT NULL + +permission_id + + [INTEGER] + +view_menu_id + + [INTEGER] - + ab_permission--ab_permission_view - -0..N -{0,1} + +0..N +{0,1} - + ab_permission_view_role - -ab_permission_view_role - -id - - [INTEGER] - NOT NULL - -permission_view_id - - [INTEGER] - -role_id - - [INTEGER] + +ab_permission_view_role + +id + + [INTEGER] + NOT NULL + +permission_view_id + + [INTEGER] + +role_id + + [INTEGER] - + ab_permission_view--ab_permission_view_role - -0..N -{0,1} + +0..N +{0,1} - + ab_view_menu - -ab_view_menu - -id - - [INTEGER] - NOT NULL - -name - - [VARCHAR(250)] - NOT NULL + +ab_view_menu + +id + + [INTEGER] + NOT NULL + +name + + [VARCHAR(250)] + NOT NULL - + ab_view_menu--ab_permission_view - -0..N -{0,1} + +0..N +{0,1} - + ab_role - -ab_role - -id - - [INTEGER] - NOT NULL - -name - - [VARCHAR(64)] - NOT NULL + +ab_role + +id + + [INTEGER] + NOT NULL + +name + + [VARCHAR(64)] + NOT NULL - + ab_role--ab_user_role - -0..N -{0,1} + +0..N +{0,1} - + ab_role--ab_permission_view_role - -0..N -{0,1} + +0..N +{0,1} - + alembic_version_fab - -alembic_version_fab - -version_num - - [VARCHAR(32)] - NOT NULL + +alembic_version_fab + +version_num + + [VARCHAR(32)] + NOT NULL diff --git a/docs/apache-airflow/migrations-ref.rst b/docs/apache-airflow/migrations-ref.rst index 6b348013bd3ce..f917717d49efe 100644 --- a/docs/apache-airflow/migrations-ref.rst +++ b/docs/apache-airflow/migrations-ref.rst @@ -39,7 +39,9 @@ Here's the list of all the Database Migrations that are executed via when you ru +-------------------------+------------------+-------------------+--------------------------------------------------------------+ | Revision ID | Revises ID | Airflow Version | Description | +=========================+==================+===================+==============================================================+ -| ``c3389cd7793f`` (head) | ``0d9e73a75ee4`` | ``3.0.0`` | Add backfill to dag run model. | +| ``5a5d66100783`` (head) | ``c3389cd7793f`` | ``3.0.0`` | Add AssetActive to track orphaning instead of a flag. | ++-------------------------+------------------+-------------------+--------------------------------------------------------------+ +| ``c3389cd7793f`` | ``0d9e73a75ee4`` | ``3.0.0`` | Add backfill to dag run model. | +-------------------------+------------------+-------------------+--------------------------------------------------------------+ | ``0d9e73a75ee4`` | ``44eabb1904b4`` | ``3.0.0`` | Add name and group fields to DatasetModel. 
| +-------------------------+------------------+-------------------+--------------------------------------------------------------+ diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py index f47f4497d5aab..5244e96490984 100644 --- a/tests/jobs/test_scheduler_job.py +++ b/tests/jobs/test_scheduler_job.py @@ -6086,9 +6086,9 @@ def test_asset_orphaning(self, dag_maker, session): with dag_maker(dag_id="assets-1", schedule=[asset1, asset2], session=session): BashOperator(task_id="task", bash_command="echo 1", outlets=[asset3, asset4]) - non_orphaned_asset_count = session.query(AssetModel).filter(~AssetModel.is_orphaned).count() + non_orphaned_asset_count = session.query(AssetModel).filter(AssetModel.active.has()).count() assert non_orphaned_asset_count == 4 - orphaned_asset_count = session.query(AssetModel).filter(AssetModel.is_orphaned).count() + orphaned_asset_count = session.query(AssetModel).filter(~AssetModel.active.has()).count() assert orphaned_asset_count == 0 # now remove 2 asset references @@ -6105,14 +6105,13 @@ def test_asset_orphaning(self, dag_maker, session): non_orphaned_assets = [ asset.uri for asset in session.query(AssetModel.uri) - .filter(~AssetModel.is_orphaned) + .filter(AssetModel.active.has()) .order_by(AssetModel.uri) ] assert non_orphaned_assets == ["ds1", "ds3"] - orphaned_assets = [ - asset.uri - for asset in session.query(AssetModel.uri).filter(AssetModel.is_orphaned).order_by(AssetModel.uri) - ] + orphaned_assets = session.scalars( + select(AssetModel.uri).where(~AssetModel.active.has()).order_by(AssetModel.uri) + ).all() assert orphaned_assets == ["ds2", "ds4"] def test_asset_orphaning_ignore_orphaned_assets(self, dag_maker, session): @@ -6121,9 +6120,9 @@ def test_asset_orphaning_ignore_orphaned_assets(self, dag_maker, session): with dag_maker(dag_id="assets-1", schedule=[asset1], session=session): BashOperator(task_id="task", bash_command="echo 1") - non_orphaned_asset_count = session.query(AssetModel).filter(~AssetModel.is_orphaned).count() + non_orphaned_asset_count = session.query(AssetModel).filter(AssetModel.active.has()).count() assert non_orphaned_asset_count == 1 - orphaned_asset_count = session.query(AssetModel).filter(AssetModel.is_orphaned).count() + orphaned_asset_count = session.query(AssetModel).filter(~AssetModel.active.has()).count() assert orphaned_asset_count == 0 # now remove asset1 reference @@ -6138,7 +6137,7 @@ def test_asset_orphaning_ignore_orphaned_assets(self, dag_maker, session): orphaned_assets_before_rerun = ( session.query(AssetModel.updated_at, AssetModel.uri) - .filter(AssetModel.is_orphaned) + .filter(~AssetModel.active.has()) .order_by(AssetModel.uri) ) assert [asset.uri for asset in orphaned_assets_before_rerun] == ["ds1"] @@ -6151,7 +6150,7 @@ def test_asset_orphaning_ignore_orphaned_assets(self, dag_maker, session): orphaned_assets_after_rerun = ( session.query(AssetModel.updated_at, AssetModel.uri) - .filter(AssetModel.is_orphaned) + .filter(~AssetModel.active.has()) .order_by(AssetModel.uri) ) assert [asset.uri for asset in orphaned_assets_after_rerun] == ["ds1"] diff --git a/tests/models/test_dag.py b/tests/models/test_dag.py index 0fe22ccf77fe3..b439487b0161e 100644 --- a/tests/models/test_dag.py +++ b/tests/models/test_dag.py @@ -1080,10 +1080,10 @@ def test_bulk_write_to_db_unorphan_assets(self): # orphans asset1 = Asset(uri="ds1") asset2 = Asset(uri="ds2") - session.add(AssetModel(uri=asset2.uri, is_orphaned=True)) + session.add(AssetModel(uri=asset2.uri)) asset3 = Asset(uri="ds3") asset4 = 
Asset(uri="ds4") - session.add(AssetModel(uri=asset4.uri, is_orphaned=True)) + session.add(AssetModel(uri=asset4.uri)) session.flush() dag1 = DAG(dag_id="assets-1", start_date=DEFAULT_DATE, schedule=[asset1]) @@ -1095,14 +1095,14 @@ def test_bulk_write_to_db_unorphan_assets(self): non_orphaned_assets = [ asset.uri for asset in session.query(AssetModel.uri) - .filter(~AssetModel.is_orphaned) + .filter(AssetModel.active.has()) .order_by(AssetModel.uri) ] assert non_orphaned_assets == ["ds1", "ds3"] orphaned_assets = [ asset.uri for asset in session.query(AssetModel.uri) - .filter(AssetModel.is_orphaned) + .filter(~AssetModel.active.has()) .order_by(AssetModel.uri) ] assert orphaned_assets == ["ds2", "ds4"] @@ -1114,9 +1114,9 @@ def test_bulk_write_to_db_unorphan_assets(self): DAG.bulk_write_to_db([dag1], session=session) # and count the orphans and non-orphans - non_orphaned_asset_count = session.query(AssetModel).filter(~AssetModel.is_orphaned).count() + non_orphaned_asset_count = session.query(AssetModel).filter(AssetModel.active.has()).count() assert non_orphaned_asset_count == 4 - orphaned_asset_count = session.query(AssetModel).filter(AssetModel.is_orphaned).count() + orphaned_asset_count = session.query(AssetModel).filter(~AssetModel.active.has()).count() assert orphaned_asset_count == 0 def test_bulk_write_to_db_asset_aliases(self): diff --git a/tests/utils/test_db_cleanup.py b/tests/utils/test_db_cleanup.py index c7533827d805d..ba7cc9476c933 100644 --- a/tests/utils/test_db_cleanup.py +++ b/tests/utils/test_db_cleanup.py @@ -329,6 +329,7 @@ def test_no_models_missing(self): "backfill_dag_run", # todo: AIP-78 "ab_user", "variable", # leave alone + "asset_active", # not good way to know if "stale" "dataset", # not good way to know if "stale" "dataset_alias", # not good way to know if "stale" "task_map", # keys to TI, so no need diff --git a/tests/www/views/test_views_dataset.py b/tests/www/views/test_views_dataset.py index 9d83715f3e210..bc40b9010c29c 100644 --- a/tests/www/views/test_views_dataset.py +++ b/tests/www/views/test_views_dataset.py @@ -22,7 +22,7 @@ from dateutil.tz import UTC from airflow.assets import Asset -from airflow.models.asset import AssetEvent, AssetModel +from airflow.models.asset import AssetActive, AssetEvent, AssetModel from airflow.operators.empty import EmptyOperator from tests_common.test_utils.asserts import assert_queries_count from tests_common.test_utils.db import clear_db_assets @@ -37,17 +37,22 @@ def _cleanup(self): yield clear_db_assets() + @pytest.fixture + def create_assets(self, session): + def create(indexes): + assets = [AssetModel(id=i, uri=f"s3://bucket/key/{i}") for i in indexes] + session.add_all(assets) + session.flush() + session.add_all(AssetActive.for_asset(a) for a in assets) + session.flush() + return assets + + return create + class TestGetDatasets(TestDatasetEndpoint): - def test_should_respond_200(self, admin_client, session): - assets = [ - AssetModel( - id=i, - uri=f"s3://bucket/key/{i}", - ) - for i in [1, 2] - ] - session.add_all(assets) + def test_should_respond_200(self, admin_client, create_assets, session): + create_assets([1, 2]) session.commit() assert session.query(AssetModel).count() == 2 @@ -74,14 +79,8 @@ def test_should_respond_200(self, admin_client, session): "total_entries": 2, } - def test_order_by_raises_400_for_invalid_attr(self, admin_client, session): - assets = [ - AssetModel( - uri=f"s3://bucket/key/{i}", - ) - for i in [1, 2] - ] - session.add_all(assets) + def 
test_order_by_raises_400_for_invalid_attr(self, admin_client, create_assets, session): + create_assets([1, 2]) session.commit() assert session.query(AssetModel).count() == 2 @@ -91,9 +90,8 @@ def test_order_by_raises_400_for_invalid_attr(self, admin_client, session): msg = "Ordering with 'fake' is disallowed or the attribute does not exist on the model" assert response.json["detail"] == msg - def test_order_by_raises_400_for_invalid_datetimes(self, admin_client, session): - assets = [AssetModel(uri=f"s3://bucket/key/{i}") for i in [1, 2]] - session.add_all(assets) + def test_order_by_raises_400_for_invalid_datetimes(self, admin_client, create_assets, session): + create_assets([1, 2]) session.commit() assert session.query(AssetModel).count() == 2 @@ -107,24 +105,17 @@ def test_order_by_raises_400_for_invalid_datetimes(self, admin_client, session): assert response.status_code == 400 assert "Invalid datetime:" in response.text - def test_filter_by_datetimes(self, admin_client, session): + def test_filter_by_datetimes(self, admin_client, create_assets, session): today = pendulum.today("UTC") - assets = [ - AssetModel( - id=i, - uri=f"s3://bucket/key/{i}", - ) - for i in range(1, 4) - ] - session.add_all(assets) + assets = create_assets(range(1, 4)) # Update assets, one per day, starting with assets[0], ending with assets[2] asset_events = [ AssetEvent( - dataset_id=assets[i].id, + dataset_id=a.id, timestamp=today.add(days=-len(assets) + i + 1), ) - for i in range(len(assets)) + for i, a in enumerate(assets) ] session.add_all(asset_events) session.commit() @@ -153,15 +144,8 @@ def test_filter_by_datetimes(self, admin_client, session): ("-last_dataset_update", [2, 3, 1, 4]), ], ) - def test_order_by(self, admin_client, session, order_by, ordered_asset_ids): - assets = [ - AssetModel( - id=i, - uri=f"s3://bucket/key/{i}", - ) - for i in range(1, len(ordered_asset_ids) + 1) - ] - session.add_all(assets) + def test_order_by(self, admin_client, session, create_assets, order_by, ordered_asset_ids): + assets = create_assets(range(1, len(ordered_asset_ids) + 1)) asset_events = [ AssetEvent( dataset_id=assets[2].id, @@ -186,15 +170,8 @@ def test_order_by(self, admin_client, session, order_by, ordered_asset_ids): assert ordered_asset_ids == [json_dict["id"] for json_dict in response.json["datasets"]] assert response.json["total_entries"] == len(ordered_asset_ids) - def test_search_uri_pattern(self, admin_client, session): - assets = [ - AssetModel( - id=i, - uri=f"s3://bucket/key_{i}", - ) - for i in [1, 2] - ] - session.add_all(assets) + def test_search_uri_pattern(self, admin_client, create_assets, session): + create_assets([1, 2]) session.commit() assert session.query(AssetModel).count() == 2 @@ -207,7 +184,7 @@ def test_search_uri_pattern(self, admin_client, session): "datasets": [ { "id": 2, - "uri": "s3://bucket/key_2", + "uri": "s3://bucket/key/2", "last_dataset_update": None, "total_updates": 0, }, @@ -224,13 +201,13 @@ def test_search_uri_pattern(self, admin_client, session): "datasets": [ { "id": 1, - "uri": "s3://bucket/key_1", + "uri": "s3://bucket/key/1", "last_dataset_update": None, "total_updates": 0, }, { "id": 2, - "uri": "s3://bucket/key_2", + "uri": "s3://bucket/key/2", "last_dataset_update": None, "total_updates": 0, }, @@ -389,15 +366,8 @@ class TestGetDatasetsEndpointPagination(TestDatasetEndpoint): ("/object/datasets_summary?offset=3&limit=3", [f"s3://bucket/key/{i}" for i in [4, 5, 6]]), ], ) - def test_limit_and_offset(self, admin_client, session, url, expected_dataset_uris): - 
assets = [ - AssetModel( - uri=f"s3://bucket/key/{i}", - extra={"foo": "bar"}, - ) - for i in range(1, 10) - ] - session.add_all(assets) + def test_limit_and_offset(self, admin_client, create_assets, session, url, expected_dataset_uris): + create_assets(range(1, 10)) session.commit() response = admin_client.get(url) @@ -406,15 +376,8 @@ def test_limit_and_offset(self, admin_client, session, url, expected_dataset_uri dataset_uris = [dataset["uri"] for dataset in response.json["datasets"]] assert dataset_uris == expected_dataset_uris - def test_should_respect_page_size_limit_default(self, admin_client, session): - assets = [ - AssetModel( - uri=f"s3://bucket/key/{i}", - extra={"foo": "bar"}, - ) - for i in range(1, 60) - ] - session.add_all(assets) + def test_should_respect_page_size_limit_default(self, admin_client, create_assets, session): + create_assets(range(1, 60)) session.commit() response = admin_client.get("/object/datasets_summary") @@ -422,15 +385,8 @@ def test_should_respect_page_size_limit_default(self, admin_client, session): assert response.status_code == 200 assert len(response.json["datasets"]) == 25 - def test_should_return_max_if_req_above(self, admin_client, session): - assets = [ - AssetModel( - uri=f"s3://bucket/key/{i}", - extra={"foo": "bar"}, - ) - for i in range(1, 60) - ] - session.add_all(assets) + def test_should_return_max_if_req_above(self, admin_client, create_assets, session): + create_assets(range(1, 60)) session.commit() response = admin_client.get("/object/datasets_summary?limit=180") From 6c9c32710e1ebbfb244216763239169e5c972d02 Mon Sep 17 00:00:00 2001 From: Omkar P <45419097+omkar-foss@users.noreply.github.com> Date: Tue, 15 Oct 2024 14:15:51 +0530 Subject: [PATCH 121/125] Migrate the public endpoint Delete DAG to FastAPI (#42914) * Migrate the public endpoint Delete DAG to FastAPI * Refactor tests --- .../api_connexion/endpoints/dag_endpoint.py | 2 + airflow/api_fastapi/openapi/v1-generated.yaml | 49 ++++ airflow/api_fastapi/views/public/dags.py | 19 +- airflow/ui/openapi-gen/queries/common.ts | 3 + airflow/ui/openapi-gen/queries/queries.ts | 37 +++ .../ui/openapi-gen/requests/services.gen.ts | 29 +++ airflow/ui/openapi-gen/requests/types.gen.ts | 35 +++ airflow/utils/api_migration.py | 2 +- tests/api_fastapi/views/public/test_dags.py | 242 ++++++++++++------ 9 files changed, 336 insertions(+), 82 deletions(-) diff --git a/airflow/api_connexion/endpoints/dag_endpoint.py b/airflow/api_connexion/endpoints/dag_endpoint.py index 3d0d3dd8bfabe..0352297bfffd4 100644 --- a/airflow/api_connexion/endpoints/dag_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_endpoint.py @@ -52,6 +52,7 @@ from airflow.api_connexion.types import APIResponse, UpdateMask +@mark_fastapi_migration_done @security.requires_access_dag("GET") @provide_session def get_dag( @@ -215,6 +216,7 @@ def patch_dags(limit, session, offset=0, only_active=True, tags=None, dag_id_pat return dags_collection_schema.dump(DAGCollection(dags=dags, total_entries=total_entries)) +@mark_fastapi_migration_done @security.requires_access_dag("DELETE") @action_logging @provide_session diff --git a/airflow/api_fastapi/openapi/v1-generated.yaml b/airflow/api_fastapi/openapi/v1-generated.yaml index 235410a6d347a..56f48c73e987f 100644 --- a/airflow/api_fastapi/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/openapi/v1-generated.yaml @@ -408,6 +408,55 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + delete: + tags: + - DAG + summary: Delete Dag + description: Delete the 
specific DAG. + operationId: delete_dag + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unprocessable Entity /public/dags/{dag_id}/details: get: tags: diff --git a/airflow/api_fastapi/views/public/dags.py b/airflow/api_fastapi/views/public/dags.py index ca0f44162eb2d..eb8233a7f700d 100644 --- a/airflow/api_fastapi/views/public/dags.py +++ b/airflow/api_fastapi/views/public/dags.py @@ -17,11 +17,12 @@ from __future__ import annotations -from fastapi import Depends, HTTPException, Query, Request +from fastapi import Depends, HTTPException, Query, Request, Response from sqlalchemy import update from sqlalchemy.orm import Session from typing_extensions import Annotated +from airflow.api.common import delete_dag as delete_dag_module from airflow.api_fastapi.db.common import ( get_session, paginated_select, @@ -48,6 +49,7 @@ DAGResponse, ) from airflow.api_fastapi.views.router import AirflowRouter +from airflow.exceptions import AirflowException, DagNotFound from airflow.models import DAG, DagModel dags_router = AirflowRouter(tags=["DAG"], prefix="/dags") @@ -204,3 +206,18 @@ async def patch_dags( dags=[DAGResponse.model_validate(dag, from_attributes=True) for dag in dags], total_entries=total_entries, ) + + +@dags_router.delete("/{dag_id}", responses=create_openapi_http_exception_doc([400, 401, 403, 404, 422])) +async def delete_dag( + dag_id: str, + session: Annotated[Session, Depends(get_session)], +) -> Response: + """Delete the specific DAG.""" + try: + delete_dag_module.delete_dag(dag_id, session=session) + except DagNotFound: + raise HTTPException(404, f"Dag with id: {dag_id} was not found") + except AirflowException: + raise HTTPException(409, f"Task instances of dag with id: '{dag_id}' are still running") + return Response(status_code=204) diff --git a/airflow/ui/openapi-gen/queries/common.ts b/airflow/ui/openapi-gen/queries/common.ts index 426e28447fd9c..2f1c6a78d92f0 100644 --- a/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow/ui/openapi-gen/queries/common.ts @@ -197,6 +197,9 @@ export type DagServicePatchDagMutationResult = Awaited< export type VariableServicePatchVariableMutationResult = Awaited< ReturnType >; +export type DagServiceDeleteDagMutationResult = Awaited< + ReturnType +>; export type ConnectionServiceDeleteConnectionMutationResult = Awaited< ReturnType >; diff --git a/airflow/ui/openapi-gen/queries/queries.ts b/airflow/ui/openapi-gen/queries/queries.ts index 557a7ba8ffa17..a16bdf165b182 100644 --- a/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow/ui/openapi-gen/queries/queries.ts @@ -514,6 +514,43 @@ export const useVariableServicePatchVariable = < }) as unknown as Promise, ...options, }); +/** + * Delete Dag + * Delete the specific DAG. + * @param data The data for the request. 
+ * @param data.dagId + * @returns unknown Successful Response + * @throws ApiError + */ +export const useDagServiceDeleteDag = < + TData = Common.DagServiceDeleteDagMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: string; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: string; + }, + TContext + >({ + mutationFn: ({ dagId }) => + DagService.deleteDag({ dagId }) as unknown as Promise, + ...options, + }); /** * Delete Connection * Delete a connection entry. diff --git a/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow/ui/openapi-gen/requests/services.gen.ts index 78b113c7f2f80..8d7f0cee2b295 100644 --- a/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow/ui/openapi-gen/requests/services.gen.ts @@ -15,6 +15,8 @@ import type { GetDagResponse, PatchDagData, PatchDagResponse, + DeleteDagData, + DeleteDagResponse, GetDagDetailsData, GetDagDetailsResponse, DeleteConnectionData, @@ -234,6 +236,33 @@ export class DagService { }); } + /** + * Delete Dag + * Delete the specific DAG. + * @param data The data for the request. + * @param data.dagId + * @returns unknown Successful Response + * @throws ApiError + */ + public static deleteDag( + data: DeleteDagData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "DELETE", + url: "/public/dags/{dag_id}", + path: { + dag_id: data.dagId, + }, + errors: { + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Unprocessable Entity", + }, + }); + } + /** * Get Dag Details * Get details of DAG. diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index 856517d560fe4..7f603a1adb4d3 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -327,6 +327,12 @@ export type PatchDagData = { export type PatchDagResponse = DAGResponse; +export type DeleteDagData = { + dagId: string; +}; + +export type DeleteDagResponse = unknown; + export type GetDagDetailsData = { dagId: string; }; @@ -525,6 +531,35 @@ export type $OpenApiTs = { 422: HTTPValidationError; }; }; + delete: { + req: DeleteDagData; + res: { + /** + * Successful Response + */ + 200: unknown; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Unprocessable Entity + */ + 422: HTTPExceptionResponse; + }; + }; }; "/public/dags/{dag_id}/details": { get: { diff --git a/airflow/utils/api_migration.py b/airflow/utils/api_migration.py index d6b61a933deed..3e6ba3881cbed 100644 --- a/airflow/utils/api_migration.py +++ b/airflow/utils/api_migration.py @@ -31,7 +31,7 @@ RT = TypeVar("RT") -def mark_fastapi_migration_done(function: Callable[PS, RT]) -> Callable[PS, RT]: +def mark_fastapi_migration_done(function: Callable[..., RT]) -> Callable[..., RT]: """ Mark an endpoint as migrated over to the new FastAPI API. 
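The `mark_fastapi_migration_done` marker touched in the hunk above is a runtime no-op: it only tags legacy Connexion handlers (here `get_dag` and `delete_dag`) once a FastAPI counterpart exists, and relaxing its annotation from the ParamSpec form to `Callable[..., RT]` presumably lets it stack with the other decorators on those handlers without the type checker having to carry their exact signatures. A minimal sketch of the pattern, with an illustrative marker attribute rather than the actual implementation:

```python
from typing import Callable, TypeVar

RT = TypeVar("RT")


def mark_fastapi_migration_done(function: Callable[..., RT]) -> Callable[..., RT]:
    """Tag a legacy API handler as migrated; runtime behavior is unchanged."""
    # Hypothetical marker attribute, tooling could scan endpoint modules for
    # it to report how many Connexion handlers still await FastAPI migration.
    function._fastapi_migration_done = True  # type: ignore[attr-defined]
    return function


@mark_fastapi_migration_done
def delete_dag(dag_id: str) -> None:  # stand-in for the Connexion handler
    ...
```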
diff --git a/tests/api_fastapi/views/public/test_dags.py b/tests/api_fastapi/views/public/test_dags.py index cd1809cb70d3d..ab0c54f51764f 100644 --- a/tests/api_fastapi/views/public/test_dags.py +++ b/tests/api_fastapi/views/public/test_dags.py @@ -25,7 +25,7 @@ from airflow.models.dagrun import DagRun from airflow.operators.empty import EmptyOperator from airflow.utils.session import provide_session -from airflow.utils.state import DagRunState +from airflow.utils.state import DagRunState, TaskInstanceState from airflow.utils.types import DagRunType from tests_common.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags @@ -36,84 +36,100 @@ DAG2_ID = "test_dag2" DAG2_START_DATE = datetime(2021, 6, 15, tzinfo=timezone.utc) DAG3_ID = "test_dag3" +DAG4_ID = "test_dag4" +DAG4_DISPLAY_NAME = "display4" +DAG5_ID = "test_dag5" +DAG5_DISPLAY_NAME = "display5" TASK_ID = "op1" UTC_JSON_REPR = "UTC" if pendulum.__version__.startswith("3") else "Timezone('UTC')" +API_PREFIX = "/public/dags" + + +class TestDagEndpoint: + """Common class for /public/dags related unit tests.""" + + @staticmethod + def _clear_db(): + clear_db_runs() + clear_db_dags() + clear_db_serialized_dags() + + def _create_deactivated_paused_dag(self, session=None): + dag_model = DagModel( + dag_id=DAG3_ID, + fileloc="/tmp/dag_del_1.py", + timetable_summary="2 2 * * *", + is_active=False, + is_paused=True, + owners="test_owner,another_test_owner", + next_dagrun=datetime(2021, 1, 1, 12, 0, 0, tzinfo=timezone.utc), + ) + + dagrun_failed = DagRun( + dag_id=DAG3_ID, + run_id="run1", + execution_date=datetime(2018, 1, 1, 12, 0, 0, tzinfo=timezone.utc), + start_date=datetime(2018, 1, 1, 12, 0, 0, tzinfo=timezone.utc), + run_type=DagRunType.SCHEDULED, + state=DagRunState.FAILED, + ) + + dagrun_success = DagRun( + dag_id=DAG3_ID, + run_id="run2", + execution_date=datetime(2019, 1, 1, 12, 0, 0, tzinfo=timezone.utc), + start_date=datetime(2019, 1, 1, 12, 0, 0, tzinfo=timezone.utc), + run_type=DagRunType.MANUAL, + state=DagRunState.SUCCESS, + ) + + session.add(dag_model) + session.add(dagrun_failed) + session.add(dagrun_success) + + @pytest.fixture(autouse=True) + @provide_session + def setup(self, dag_maker, session=None) -> None: + self._clear_db() + + with dag_maker( + DAG1_ID, + dag_display_name=DAG1_DISPLAY_NAME, + schedule=None, + start_date=datetime(2018, 6, 15, 0, 0, tzinfo=timezone.utc), + doc_md="details", + params={"foo": 1}, + tags=["example"], + ): + EmptyOperator(task_id=TASK_ID) + + dag_maker.create_dagrun(state=DagRunState.FAILED) + + with dag_maker( + DAG2_ID, + schedule=None, + start_date=DAG2_START_DATE, + doc_md="details", + params={"foo": 1}, + max_active_tasks=16, + max_active_runs=16, + ): + EmptyOperator(task_id=TASK_ID) + + self._create_deactivated_paused_dag(session) + + dag_maker.dagbag.sync_to_db() + dag_maker.dag_model.has_task_concurrency_limits = True + session.merge(dag_maker.dag_model) + session.commit() + + def teardown_method(self) -> None: + self._clear_db() + + +class TestGetDags(TestDagEndpoint): + """Unit tests for Get DAGs.""" - -@provide_session -def _create_deactivated_paused_dag(session=None): - dag_model = DagModel( - dag_id=DAG3_ID, - fileloc="/tmp/dag_del_1.py", - timetable_summary="2 2 * * *", - is_active=False, - is_paused=True, - owners="test_owner,another_test_owner", - next_dagrun=datetime(2021, 1, 1, 12, 0, 0, tzinfo=timezone.utc), - ) - - dagrun_failed = DagRun( - dag_id=DAG3_ID, - run_id="run1", - execution_date=datetime(2018, 1, 1, 12, 0, 0, tzinfo=timezone.utc), - 
start_date=datetime(2018, 1, 1, 12, 0, 0, tzinfo=timezone.utc), - run_type=DagRunType.SCHEDULED, - state=DagRunState.FAILED, - ) - - dagrun_success = DagRun( - dag_id=DAG3_ID, - run_id="run2", - execution_date=datetime(2019, 1, 1, 12, 0, 0, tzinfo=timezone.utc), - start_date=datetime(2019, 1, 1, 12, 0, 0, tzinfo=timezone.utc), - run_type=DagRunType.MANUAL, - state=DagRunState.SUCCESS, - ) - - session.add(dag_model) - session.add(dagrun_failed) - session.add(dagrun_success) - - -@pytest.fixture(autouse=True) -@provide_session -def setup(dag_maker, session=None) -> None: - clear_db_runs() - clear_db_dags() - clear_db_serialized_dags() - - with dag_maker( - DAG1_ID, - dag_display_name=DAG1_DISPLAY_NAME, - schedule=None, - start_date=datetime(2018, 6, 15, 0, 0, tzinfo=timezone.utc), - doc_md="details", - params={"foo": 1}, - tags=["example"], - ): - EmptyOperator(task_id=TASK_ID) - - dag_maker.create_dagrun(state=DagRunState.FAILED) - - with dag_maker( - DAG2_ID, - schedule=None, - start_date=DAG2_START_DATE, - doc_md="details", - params={"foo": 1}, - max_active_tasks=16, - max_active_runs=16, - ): - EmptyOperator(task_id=TASK_ID) - - dag_maker.dagbag.sync_to_db() - dag_maker.dag_model.has_task_concurrency_limits = True - session.merge(dag_maker.dag_model) - session.commit() - _create_deactivated_paused_dag() - - -class TestGetDags: @pytest.mark.parametrize( "query_params, expected_total_entries, expected_ids", [ @@ -161,7 +177,9 @@ def test_get_dags(self, test_client, query_params, expected_total_entries, expec assert [dag["dag_id"] for dag in body["dags"]] == expected_ids -class TestPatchDag: +class TestPatchDag(TestDagEndpoint): + """Unit tests for Patch DAG.""" + @pytest.mark.parametrize( "query_params, dag_id, body, expected_status_code, expected_is_paused", [ @@ -184,7 +202,9 @@ def test_patch_dag( assert body["is_paused"] == expected_is_paused -class TestPatchDags: +class TestPatchDags(TestDagEndpoint): + """Unit tests for Patch DAGs.""" + @pytest.mark.parametrize( "query_params, body, expected_status_code, expected_ids, expected_paused_ids", [ @@ -239,7 +259,9 @@ def test_patch_dags( assert paused_dag_ids == expected_paused_ids -class TestDagDetails: +class TestDagDetails(TestDagEndpoint): + """Unit tests for DAG Details.""" + @pytest.mark.parametrize( "query_params, dag_id, expected_status_code, dag_display_name, start_date", [ @@ -312,7 +334,9 @@ def test_dag_details( assert res_json == expected -class TestGetDag: +class TestGetDag(TestDagEndpoint): + """Unit tests for Get DAG.""" + @pytest.mark.parametrize( "query_params, dag_id, expected_status_code, dag_display_name", [ @@ -359,3 +383,61 @@ def test_get_dag(self, test_client, query_params, dag_id, expected_status_code, "pickle_id": None, } assert res_json == expected + + +class TestDeleteDAG(TestDagEndpoint): + """Unit tests for Delete DAG.""" + + def _create_dag_for_deletion( + self, + dag_maker, + dag_id=None, + dag_display_name=None, + has_running_dagruns=False, + ): + with dag_maker( + dag_id, + dag_display_name=dag_display_name, + start_date=datetime(2024, 10, 10, tzinfo=timezone.utc), + ): + EmptyOperator(task_id="dummy") + + if has_running_dagruns: + dr = dag_maker.create_dagrun() + ti = dr.get_task_instances()[0] + ti.set_state(TaskInstanceState.RUNNING) + + dag_maker.dagbag.sync_to_db() + + @pytest.mark.parametrize( + "dag_id, dag_display_name, status_code_delete, status_code_details, has_running_dagruns, is_create_dag", + [ + ("test_nonexistent_dag_id", "nonexistent_display_name", 404, 404, False, False), + (DAG4_ID, 
DAG4_DISPLAY_NAME, 204, 404, False, True), + (DAG5_ID, DAG5_DISPLAY_NAME, 409, 200, True, True), + ], + ) + def test_delete_dag( + self, + dag_maker, + test_client, + dag_id, + dag_display_name, + status_code_delete, + status_code_details, + has_running_dagruns, + is_create_dag, + ): + if is_create_dag: + self._create_dag_for_deletion( + dag_maker=dag_maker, + dag_id=dag_id, + dag_display_name=dag_display_name, + has_running_dagruns=has_running_dagruns, + ) + + delete_response = test_client.delete(f"{API_PREFIX}/{dag_id}") + assert delete_response.status_code == status_code_delete + + details_response = test_client.get(f"{API_PREFIX}/{dag_id}/details") + assert details_response.status_code == status_code_details From c471c31111958f0a4dde775f559d5c606f3149a8 Mon Sep 17 00:00:00 2001 From: Amogh Desai Date: Tue, 15 Oct 2024 15:10:52 +0530 Subject: [PATCH 122/125] Clarifying PLUGINS_FOLDER permissions by DAG authors (#43022) --- docs/apache-airflow/security/security_model.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/apache-airflow/security/security_model.rst b/docs/apache-airflow/security/security_model.rst index 0181710bda9dc..ebe1b35c54fab 100644 --- a/docs/apache-airflow/security/security_model.rst +++ b/docs/apache-airflow/security/security_model.rst @@ -212,12 +212,15 @@ DAG author to choose the code that will be executed in the scheduler or webserve should not be arbitrary code that DAG author can add in DAG folder. All those functionalities are only available via ``plugins`` and ``providers`` mechanisms where the code that is executed can only be provided by installed packages (or in case of plugins it can also be added to PLUGINS folder where DAG -authors should not have write access to). PLUGINS FOLDER is a legacy mechanism coming from Airflow 1.10 +authors should not have write access to). PLUGINS_FOLDER is a legacy mechanism coming from Airflow 1.10 - but we recommend using entrypoint mechanism that allows the Deployment Manager to - effectively - choose and register the code that will be executed in those contexts. DAG Author has no access to install or modify packages installed in Webserver and Scheduler, and this is the way to prevent the DAG Author to execute arbitrary code in those processes. +Additionally, if you decide to utilize and configure the PLUGINS_FOLDER, it is essential for the Deployment +Manager to ensure that the DAG author does not have write access to this folder. + The Deployment Manager might decide to introduce additional control mechanisms to prevent DAG authors from executing arbitrary code. This is all fully in hands of the Deployment Manager and it is discussed in the following chapter. 
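The security-model text in the patch above recommends the entrypoint mechanism over a DAG-author-writable PLUGINS_FOLDER. For illustration, a plugin shipped that way is just a normal package the Deployment Manager installs; all package, module, and class names below are hypothetical:

```python
# setup.py for a hypothetical "acme-airflow-plugin" package installed by the
# Deployment Manager. Registering the plugin class under the "airflow.plugins"
# entry point group lets Airflow discover it with no PLUGINS_FOLDER involved.
from setuptools import setup

setup(
    name="acme-airflow-plugin",
    version="1.0.0",
    packages=["acme_airflow_plugin"],
    # acme_airflow_plugin/plugin.py would define AcmePlugin, a subclass of
    # airflow.plugins_manager.AirflowPlugin.
    entry_points={
        "airflow.plugins": ["acme = acme_airflow_plugin.plugin:AcmePlugin"],
    },
)
```

Because only packages installed into the environment can register this entry point, the choice of what code runs inside the scheduler and webserver stays with whoever controls that environment, which is exactly the boundary the security model draws.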
From 2eaa90fd369ef9bd51e239ea2cd5c7641ffc4779 Mon Sep 17 00:00:00 2001 From: Gopal Dirisala <39794726+dirrao@users.noreply.github.com> Date: Tue, 15 Oct 2024 15:39:17 +0530 Subject: [PATCH 123/125] kubernetes package version bump to 31.0.0 (#42907) --- generated/provider_dependencies.json | 4 ++-- providers/src/airflow/providers/cncf/kubernetes/provider.yaml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json index 49b457cd81a70..c91323180f7cc 100644 --- a/generated/provider_dependencies.json +++ b/generated/provider_dependencies.json @@ -370,8 +370,8 @@ "asgiref>=3.5.2", "cryptography>=41.0.0", "google-re2>=1.0", - "kubernetes>=29.0.0,<=30.1.0", - "kubernetes_asyncio>=29.0.0,<=30.1.0" + "kubernetes>=29.0.0,<=31.0.0", + "kubernetes_asyncio>=29.0.0,<=31.0.0" ], "devel-deps": [], "plugins": [], diff --git a/providers/src/airflow/providers/cncf/kubernetes/provider.yaml b/providers/src/airflow/providers/cncf/kubernetes/provider.yaml index 0849609b8ff80..980d552e15453 100644 --- a/providers/src/airflow/providers/cncf/kubernetes/provider.yaml +++ b/providers/src/airflow/providers/cncf/kubernetes/provider.yaml @@ -104,13 +104,13 @@ dependencies: # limiting minimum airflow version supported in cncf.kubernetes provider, due to the # potential breaking changes in Airflow Core as well (kubernetes is added as extra, so Airflow # core is not hard-limited via install-requirements, only by extra). - - kubernetes>=29.0.0,<=30.1.0 + - kubernetes>=29.0.0,<=31.0.0 # The Kubernetes_asyncio package is used for providing Asynchronous (AsyncIO) client library for # standard Kubernetes API. The version is limited by minimum 18.20.1 because of introducing the ability to # load kubernetes config file from dictionary in that release and is limited to the next MAJOR version # (started from current 24.2.2 version) to prevent introducing some problems that could be due to some # major changes in the package. 
- - kubernetes_asyncio>=29.0.0,<=30.1.0 + - kubernetes_asyncio>=29.0.0,<=31.0.0 - google-re2>=1.0 integrations: From 732e0564d47813dfb05b22b9b30a108d4668a3e5 Mon Sep 17 00:00:00 2001 From: saucoide <32314353+saucoide@users.noreply.github.com> Date: Wed, 16 Oct 2024 00:43:50 +1300 Subject: [PATCH 124/125] Make google provider pyarrow dependency explicit (#42996) * Make google provider pyarrow dependency explicit The provider already depends on it directly but it was not listed in provider.yaml * Add comment to the change * rerun hooks --- generated/provider_dependencies.json | 1 + providers/src/airflow/providers/google/provider.yaml | 2 ++ 2 files changed, 3 insertions(+) diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json index c91323180f7cc..e025266eebf81 100644 --- a/generated/provider_dependencies.json +++ b/generated/provider_dependencies.json @@ -686,6 +686,7 @@ "pandas>=1.5.3,<2.2;python_version<\"3.9\"", "pandas>=2.1.2,<2.2;python_version>=\"3.9\"", "proto-plus>=1.19.6", + "pyarrow>=14.0.1", "python-slugify>=7.0.0", "sqlalchemy-bigquery>=1.2.1", "sqlalchemy-spanner>=1.6.2", diff --git a/providers/src/airflow/providers/google/provider.yaml b/providers/src/airflow/providers/google/provider.yaml index 196a035badc4d..86a98a6962757 100644 --- a/providers/src/airflow/providers/google/provider.yaml +++ b/providers/src/airflow/providers/google/provider.yaml @@ -170,6 +170,8 @@ dependencies: # A transient dependency of google-cloud-bigquery-datatransfer, but we # further constrain it since older versions are buggy. - proto-plus>=1.19.6 + # Used to write parquet files by BaseSqlToGCSOperator + - pyarrow>=14.0.1 - python-slugify>=7.0.0 - PyOpenSSL>=23.0.0 - sqlalchemy-bigquery>=1.2.1 From f38d56dbf4dc1639142fc5a494d5da24996a56cc Mon Sep 17 00:00:00 2001 From: Luyang Liu Date: Tue, 15 Oct 2024 23:30:19 +1100 Subject: [PATCH 125/125] Add search by dag_display_name_pattern on dag list page with rebase (#42896) * Add search by `dag_display_name_pattern` on dag list page. * refactor search parameter update logic * Refactor SearchBar component and simplify debounce logic in DagsList. * Refactor search input change handling for SearchBar and DagsList components * Update airflow/ui/src/pages/DagsList/DagsList.tsx Co-authored-by: Brent Bovenzi * Refactor search parameter handling and improve type consistency * Remove typo * Add `LAST_DAG_RUN_STATE` to `SearchParamsKeys` and update filters * Add LAST_DAG_RUN_STATE to SearchParamsKeys and update filters * Fix missing change for add SearchBar component by removing forwardRef and adding debounced search logic. * minor change for SearchBar and DagsFilters components * Optimize imports and improve formatting across components * refactor: move query options from autogenerated useDagServiceGetDags to DagsList. * Fix formatting. 
--------- Co-authored-by: Brent Bovenzi --- airflow/ui/package.json | 3 +- airflow/ui/pnpm-lock.yaml | 13 ++++ .../src/components/DataTable/searchParams.ts | 13 +++- airflow/ui/src/components/SearchBar.tsx | 62 +++++++++------ airflow/ui/src/constants/searchParams.ts | 31 ++++++++ airflow/ui/src/pages/DagsList/DagsFilters.tsx | 16 ++-- airflow/ui/src/pages/DagsList/DagsList.tsx | 78 +++++++++++++++---- 7 files changed, 169 insertions(+), 47 deletions(-) create mode 100644 airflow/ui/src/constants/searchParams.ts diff --git a/airflow/ui/package.json b/airflow/ui/package.json index 014564c1604bc..a7cf90bb5731b 100644 --- a/airflow/ui/package.json +++ b/airflow/ui/package.json @@ -28,7 +28,8 @@ "react": "^18.3.1", "react-dom": "^18.3.1", "react-icons": "^5.3.0", - "react-router-dom": "^6.26.2" + "react-router-dom": "^6.26.2", + "use-debounce": "^10.0.3" }, "devDependencies": { "@7nohe/openapi-react-query-codegen": "^1.6.0", diff --git a/airflow/ui/pnpm-lock.yaml b/airflow/ui/pnpm-lock.yaml index c9f94d35f7358..3b73df0fa8049 100644 --- a/airflow/ui/pnpm-lock.yaml +++ b/airflow/ui/pnpm-lock.yaml @@ -47,6 +47,9 @@ importers: react-router-dom: specifier: ^6.26.2 version: 6.26.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + use-debounce: + specifier: ^10.0.3 + version: 10.0.3(react@18.3.1) devDependencies: '@7nohe/openapi-react-query-codegen': specifier: ^1.6.0 @@ -3221,6 +3224,12 @@ packages: '@types/react': optional: true + use-debounce@10.0.3: + resolution: {integrity: sha512-DxQSI9ZKso689WM1mjgGU3ozcxU1TJElBJ3X6S4SMzMNcm2lVH0AHmyXB+K7ewjz2BSUKJTDqTcwtSMRfB89dg==} + engines: {node: '>= 16.0.0'} + peerDependencies: + react: '*' + use-isomorphic-layout-effect@1.1.2: resolution: {integrity: sha512-49L8yCO3iGT/ZF9QttjwLF/ZD9Iwto5LnH5LmEdk/6cFmXddqi2ulF0edxTwjj+7mqvpVVGQWvbXZdn32wRSHA==} peerDependencies: @@ -6892,6 +6901,10 @@ snapshots: optionalDependencies: '@types/react': 18.3.5 + use-debounce@10.0.3(react@18.3.1): + dependencies: + react: 18.3.1 + use-isomorphic-layout-effect@1.1.2(@types/react@18.3.5)(react@18.3.1): dependencies: react: 18.3.1 diff --git a/airflow/ui/src/components/DataTable/searchParams.ts b/airflow/ui/src/components/DataTable/searchParams.ts index 8cc57ad7b5bcf..39001b097f349 100644 --- a/airflow/ui/src/components/DataTable/searchParams.ts +++ b/airflow/ui/src/components/DataTable/searchParams.ts @@ -18,11 +18,18 @@ */ import type { SortingState } from "@tanstack/react-table"; +import { + SearchParamsKeys, + type SearchParamsKeysType, +} from "src/constants/searchParams"; + import type { TableState } from "./types"; -export const LIMIT_PARAM = "limit"; -export const OFFSET_PARAM = "offset"; -export const SORT_PARAM = "sort"; +const { + LIMIT: LIMIT_PARAM, + OFFSET: OFFSET_PARAM, + SORT: SORT_PARAM, +}: SearchParamsKeysType = SearchParamsKeys; export const stateToSearchParams = ( state: TableState, diff --git a/airflow/ui/src/components/SearchBar.tsx b/airflow/ui/src/components/SearchBar.tsx index 830f942677123..ad50a65b4311b 100644 --- a/airflow/ui/src/components/SearchBar.tsx +++ b/airflow/ui/src/components/SearchBar.tsx @@ -18,15 +18,19 @@ */ import { Button, - type ButtonProps, Input, InputGroup, - type InputGroupProps, InputLeftElement, - type InputProps, InputRightElement, + type ButtonProps, + type InputGroupProps, + type InputProps, } from "@chakra-ui/react"; +import type { ChangeEvent } from "react"; import { FiSearch } from "react-icons/fi"; +import { useDebouncedCallback } from "use-debounce"; + +const debounceDelay = 200; export const SearchBar = ({ buttonProps, 
@@ -36,23 +40,35 @@ export const SearchBar = ({ readonly buttonProps?: ButtonProps; readonly groupProps?: InputGroupProps; readonly inputProps?: InputProps; -}) => ( - - - - - - - - - -); +}) => { + const handleSearchChange = useDebouncedCallback( + (event: ChangeEvent) => inputProps?.onChange?.(event), + debounceDelay, + ); + + return ( + + + + + + + + + + ); +}; diff --git a/airflow/ui/src/constants/searchParams.ts b/airflow/ui/src/constants/searchParams.ts new file mode 100644 index 0000000000000..893a4461bffe7 --- /dev/null +++ b/airflow/ui/src/constants/searchParams.ts @@ -0,0 +1,31 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +export enum SearchParamsKeys { + LAST_DAG_RUN_STATE = "last_dag_run_state", + LIMIT = "limit", + NAME_PATTERN = "name_pattern", + OFFSET = "offset", + PAUSED = "paused", + SORT = "sort", +} + +export type SearchParamsKeysType = Record< + keyof typeof SearchParamsKeys, + string +>; diff --git a/airflow/ui/src/pages/DagsList/DagsFilters.tsx b/airflow/ui/src/pages/DagsList/DagsFilters.tsx index 6316816f48f95..3d507ace365db 100644 --- a/airflow/ui/src/pages/DagsList/DagsFilters.tsx +++ b/airflow/ui/src/pages/DagsList/DagsFilters.tsx @@ -23,15 +23,21 @@ import { useSearchParams } from "react-router-dom"; import { useTableURLState } from "src/components/DataTable/useTableUrlState"; import { QuickFilterButton } from "src/components/QuickFilterButton"; +import { + SearchParamsKeys, + type SearchParamsKeysType, +} from "src/constants/searchParams"; -const PAUSED_PARAM = "paused"; -const STATE_PARAM = "last_dag_run_state"; +const { + LAST_DAG_RUN_STATE: LAST_DAG_RUN_STATE_PARAM, + PAUSED: PAUSED_PARAM, +}: SearchParamsKeysType = SearchParamsKeys; export const DagsFilters = () => { const [searchParams, setSearchParams] = useSearchParams(); const showPaused = searchParams.get(PAUSED_PARAM); - const state = searchParams.get(STATE_PARAM); + const state = searchParams.get(LAST_DAG_RUN_STATE_PARAM); const isAll = state === null; const isRunning = state === "running"; const isFailed = state === "failed"; @@ -61,9 +67,9 @@ export const DagsFilters = () => { useCallback( ({ currentTarget: { value } }) => { if (value === "all") { - searchParams.delete(STATE_PARAM); + searchParams.delete(LAST_DAG_RUN_STATE_PARAM); } else { - searchParams.set(STATE_PARAM, value); + searchParams.set(LAST_DAG_RUN_STATE_PARAM, value); } setSearchParams(searchParams); setTableURLState({ diff --git a/airflow/ui/src/pages/DagsList/DagsList.tsx b/airflow/ui/src/pages/DagsList/DagsList.tsx index 60d6ef9c4f437..623b8a3b4bacc 100644 --- a/airflow/ui/src/pages/DagsList/DagsList.tsx +++ b/airflow/ui/src/pages/DagsList/DagsList.tsx @@ -25,7 +25,12 @@ import { VStack, } from "@chakra-ui/react"; import type { ColumnDef } from "@tanstack/react-table"; 
-import { type ChangeEventHandler, useCallback, useState } from "react"; +import { + type ChangeEvent, + type ChangeEventHandler, + useCallback, + useState, +} from "react"; import { useSearchParams } from "react-router-dom"; import { useDagServiceGetDags } from "openapi/queries"; @@ -37,6 +42,10 @@ import { useTableURLState } from "src/components/DataTable/useTableUrlState"; import { ErrorAlert } from "src/components/ErrorAlert"; import { SearchBar } from "src/components/SearchBar"; import { TogglePause } from "src/components/TogglePause"; +import { + SearchParamsKeys, + type SearchParamsKeysType, +} from "src/constants/searchParams"; import { pluralize } from "src/utils/pluralize"; import { DagCard } from "./DagCard"; @@ -90,6 +99,12 @@ const columns: Array> = [ }, ]; +const { + LAST_DAG_RUN_STATE: LAST_DAG_RUN_STATE_PARAM, + NAME_PATTERN: NAME_PATTERN_PARAM, + PAUSED: PAUSED_PARAM, +}: SearchParamsKeysType = SearchParamsKeys; + const cardDef: CardDef = { card: ({ row }) => , meta: { @@ -97,31 +112,61 @@ const cardDef: CardDef = { }, }; -const PAUSED_PARAM = "paused"; -const STATE_PARAM = "last_dag_run_state"; - export const DagsList = () => { - const [searchParams] = useSearchParams(); + const [searchParams, setSearchParams] = useSearchParams(); const [display, setDisplay] = useState<"card" | "table">("card"); const showPaused = searchParams.get(PAUSED_PARAM); - const lastDagRunState = searchParams.get(STATE_PARAM) as DagRunState; + const lastDagRunState = searchParams.get( + LAST_DAG_RUN_STATE_PARAM, + ) as DagRunState; const { setTableURLState, tableURLState } = useTableURLState(); const { pagination, sorting } = tableURLState; + const [dagDisplayNamePattern, setDagDisplayNamePattern] = useState( + searchParams.get(NAME_PATTERN_PARAM) ?? undefined, + ); // TODO: update API to accept multiple orderBy params const [sort] = sorting; const orderBy = sort ? `${sort.desc ? "-" : ""}${sort.id}` : undefined; - const { data, error, isFetching, isLoading } = useDagServiceGetDags({ - lastDagRunState, - limit: pagination.pageSize, - offset: pagination.pageIndex * pagination.pageSize, - onlyActive: true, - orderBy, - paused: showPaused === null ? undefined : showPaused === "true", - }); + const handleSearchChange = ({ + target: { value }, + }: ChangeEvent) => { + if (value) { + searchParams.set(NAME_PATTERN_PARAM, value); + } else { + searchParams.delete(NAME_PATTERN_PARAM); + } + setSearchParams(searchParams); + setTableURLState({ + pagination: { ...pagination, pageIndex: 0 }, + sorting, + }); + setDagDisplayNamePattern(value); + }; + + const { data, error, isFetching, isLoading } = useDagServiceGetDags( + { + dagDisplayNamePattern: Boolean(dagDisplayNamePattern) + ? `%${dagDisplayNamePattern}%` + : undefined, + lastDagRunState, + limit: pagination.pageSize, + offset: pagination.pageIndex * pagination.pageSize, + onlyActive: true, + orderBy, + paused: showPaused === null ? undefined : showPaused === "true", + }, + [dagDisplayNamePattern, showPaused], + { + refetchOnMount: true, + refetchOnReconnect: false, + refetchOnWindowFocus: false, + staleTime: 5 * 60 * 1000, + }, + ); const handleSortChange = useCallback>( ({ currentTarget: { value } }) => { @@ -140,7 +185,10 @@ export const DagsList = () => {